code_generator_arm.cc revision 66ce173a40eff4392e9949ede169ccf3108be2db
1/* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#include "code_generator_arm.h" 18 19#include "entrypoints/quick/quick_entrypoints.h" 20#include "gc/accounting/card_table.h" 21#include "mirror/array-inl.h" 22#include "mirror/art_method.h" 23#include "mirror/class.h" 24#include "thread.h" 25#include "utils/assembler.h" 26#include "utils/arm/assembler_arm.h" 27#include "utils/arm/managed_register_arm.h" 28#include "utils/stack_checks.h" 29 30namespace art { 31 32namespace arm { 33 34static SRegister FromDToLowS(DRegister reg) { 35 return static_cast<SRegister>(reg * 2); 36} 37 38static constexpr bool kExplicitStackOverflowCheck = false; 39 40static constexpr int kNumberOfPushedRegistersAtEntry = 1 + 2; // LR, R6, R7 41static constexpr int kCurrentMethodStackOffset = 0; 42 43static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2 }; 44static constexpr size_t kRuntimeParameterCoreRegistersLength = 45 arraysize(kRuntimeParameterCoreRegisters); 46static constexpr DRegister kRuntimeParameterFpuRegisters[] = { }; 47static constexpr size_t kRuntimeParameterFpuRegistersLength = 0; 48 49class InvokeRuntimeCallingConvention : public CallingConvention<Register, DRegister> { 50 public: 51 InvokeRuntimeCallingConvention() 52 : CallingConvention(kRuntimeParameterCoreRegisters, 53 kRuntimeParameterCoreRegistersLength, 54 kRuntimeParameterFpuRegisters, 55 kRuntimeParameterFpuRegistersLength) {} 56 57 private: 58 DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention); 59}; 60 61#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())-> 62 63class SlowPathCodeARM : public SlowPathCode { 64 public: 65 SlowPathCodeARM() : entry_label_(), exit_label_() {} 66 67 Label* GetEntryLabel() { return &entry_label_; } 68 Label* GetExitLabel() { return &exit_label_; } 69 70 private: 71 Label entry_label_; 72 Label exit_label_; 73 74 DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM); 75}; 76 77class NullCheckSlowPathARM : public SlowPathCodeARM { 78 public: 79 explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {} 80 81 virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 82 __ Bind(GetEntryLabel()); 83 int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowNullPointer).Int32Value(); 84 __ LoadFromOffset(kLoadWord, LR, TR, offset); 85 __ blx(LR); 86 codegen->RecordPcInfo(instruction_, instruction_->GetDexPc()); 87 } 88 89 private: 90 HNullCheck* const instruction_; 91 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM); 92}; 93 94class StackOverflowCheckSlowPathARM : public SlowPathCodeARM { 95 public: 96 StackOverflowCheckSlowPathARM() {} 97 98 virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 99 __ Bind(GetEntryLabel()); 100 __ LoadFromOffset(kLoadWord, PC, TR, 101 QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowStackOverflow).Int32Value()); 102 } 103 104 private: 105 DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM); 106}; 107 108class SuspendCheckSlowPathARM : public SlowPathCodeARM { 109 public: 110 explicit SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor) 111 : instruction_(instruction), successor_(successor) {} 112 113 virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 114 CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen); 115 __ Bind(GetEntryLabel()); 116 codegen->SaveLiveRegisters(instruction_->GetLocations()); 117 int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pTestSuspend).Int32Value(); 118 __ LoadFromOffset(kLoadWord, LR, TR, offset); 119 __ blx(LR); 120 codegen->RecordPcInfo(instruction_, instruction_->GetDexPc()); 121 codegen->RestoreLiveRegisters(instruction_->GetLocations()); 122 if (successor_ == nullptr) { 123 __ b(GetReturnLabel()); 124 } else { 125 __ b(arm_codegen->GetLabelOf(successor_)); 126 } 127 } 128 129 Label* GetReturnLabel() { 130 DCHECK(successor_ == nullptr); 131 return &return_label_; 132 } 133 134 private: 135 HSuspendCheck* const instruction_; 136 // If not null, the block to branch to after the suspend check. 137 HBasicBlock* const successor_; 138 139 // If `successor_` is null, the label to branch to after the suspend check. 140 Label return_label_; 141 142 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM); 143}; 144 145class BoundsCheckSlowPathARM : public SlowPathCodeARM { 146 public: 147 BoundsCheckSlowPathARM(HBoundsCheck* instruction, 148 Location index_location, 149 Location length_location) 150 : instruction_(instruction), 151 index_location_(index_location), 152 length_location_(length_location) {} 153 154 virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 155 CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen); 156 __ Bind(GetEntryLabel()); 157 InvokeRuntimeCallingConvention calling_convention; 158 arm_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), index_location_); 159 arm_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(1)), length_location_); 160 int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowArrayBounds).Int32Value(); 161 __ LoadFromOffset(kLoadWord, LR, TR, offset); 162 __ blx(LR); 163 codegen->RecordPcInfo(instruction_, instruction_->GetDexPc()); 164 } 165 166 private: 167 HBoundsCheck* const instruction_; 168 const Location index_location_; 169 const Location length_location_; 170 171 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM); 172}; 173 174#undef __ 175#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())-> 176 177inline Condition ARMCondition(IfCondition cond) { 178 switch (cond) { 179 case kCondEQ: return EQ; 180 case kCondNE: return NE; 181 case kCondLT: return LT; 182 case kCondLE: return LE; 183 case kCondGT: return GT; 184 case kCondGE: return GE; 185 default: 186 LOG(FATAL) << "Unknown if condition"; 187 } 188 return EQ; // Unreachable. 189} 190 191inline Condition ARMOppositeCondition(IfCondition cond) { 192 switch (cond) { 193 case kCondEQ: return NE; 194 case kCondNE: return EQ; 195 case kCondLT: return GE; 196 case kCondLE: return GT; 197 case kCondGT: return LE; 198 case kCondGE: return LT; 199 default: 200 LOG(FATAL) << "Unknown if condition"; 201 } 202 return EQ; // Unreachable. 203} 204 205void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const { 206 stream << ArmManagedRegister::FromCoreRegister(Register(reg)); 207} 208 209void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const { 210 stream << ArmManagedRegister::FromDRegister(DRegister(reg)); 211} 212 213size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) { 214 __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index); 215 return kArmWordSize; 216} 217 218size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) { 219 __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index); 220 return kArmWordSize; 221} 222 223CodeGeneratorARM::CodeGeneratorARM(HGraph* graph) 224 : CodeGenerator(graph, kNumberOfCoreRegisters, kNumberOfDRegisters, kNumberOfRegisterPairs), 225 block_labels_(graph->GetArena(), 0), 226 location_builder_(graph, this), 227 instruction_visitor_(graph, this), 228 move_resolver_(graph->GetArena(), this), 229 assembler_(true) {} 230 231size_t CodeGeneratorARM::FrameEntrySpillSize() const { 232 return kNumberOfPushedRegistersAtEntry * kArmWordSize; 233} 234 235Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const { 236 switch (type) { 237 case Primitive::kPrimLong: { 238 size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs); 239 ArmManagedRegister pair = 240 ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg)); 241 DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]); 242 DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]); 243 244 blocked_core_registers_[pair.AsRegisterPairLow()] = true; 245 blocked_core_registers_[pair.AsRegisterPairHigh()] = true; 246 UpdateBlockedPairRegisters(); 247 return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh()); 248 } 249 250 case Primitive::kPrimByte: 251 case Primitive::kPrimBoolean: 252 case Primitive::kPrimChar: 253 case Primitive::kPrimShort: 254 case Primitive::kPrimInt: 255 case Primitive::kPrimNot: { 256 int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters); 257 // Block all register pairs that contain `reg`. 258 for (int i = 0; i < kNumberOfRegisterPairs; i++) { 259 ArmManagedRegister current = 260 ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i)); 261 if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) { 262 blocked_register_pairs_[i] = true; 263 } 264 } 265 return Location::RegisterLocation(reg); 266 } 267 268 case Primitive::kPrimFloat: 269 case Primitive::kPrimDouble: { 270 int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfDRegisters); 271 return Location::FpuRegisterLocation(reg); 272 } 273 274 case Primitive::kPrimVoid: 275 LOG(FATAL) << "Unreachable type " << type; 276 } 277 278 return Location(); 279} 280 281void CodeGeneratorARM::SetupBlockedRegisters() const { 282 // Don't allocate the dalvik style register pair passing. 283 blocked_register_pairs_[R1_R2] = true; 284 285 // Stack register, LR and PC are always reserved. 286 blocked_core_registers_[SP] = true; 287 blocked_core_registers_[LR] = true; 288 blocked_core_registers_[PC] = true; 289 290 // Reserve R4 for suspend check. 291 blocked_core_registers_[R4] = true; 292 293 // Reserve thread register. 294 blocked_core_registers_[TR] = true; 295 296 // Reserve temp register. 297 blocked_core_registers_[IP] = true; 298 299 // TODO: We currently don't use Quick's callee saved registers. 300 // We always save and restore R6 and R7 to make sure we can use three 301 // register pairs for long operations. 302 blocked_core_registers_[R5] = true; 303 blocked_core_registers_[R8] = true; 304 blocked_core_registers_[R10] = true; 305 blocked_core_registers_[R11] = true; 306 307 blocked_fpu_registers_[D8] = true; 308 blocked_fpu_registers_[D9] = true; 309 blocked_fpu_registers_[D10] = true; 310 blocked_fpu_registers_[D11] = true; 311 blocked_fpu_registers_[D12] = true; 312 blocked_fpu_registers_[D13] = true; 313 blocked_fpu_registers_[D14] = true; 314 blocked_fpu_registers_[D15] = true; 315 316 UpdateBlockedPairRegisters(); 317} 318 319void CodeGeneratorARM::UpdateBlockedPairRegisters() const { 320 for (int i = 0; i < kNumberOfRegisterPairs; i++) { 321 ArmManagedRegister current = 322 ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i)); 323 if (blocked_core_registers_[current.AsRegisterPairLow()] 324 || blocked_core_registers_[current.AsRegisterPairHigh()]) { 325 blocked_register_pairs_[i] = true; 326 } 327 } 328} 329 330InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen) 331 : HGraphVisitor(graph), 332 assembler_(codegen->GetAssembler()), 333 codegen_(codegen) {} 334 335void CodeGeneratorARM::GenerateFrameEntry() { 336 bool skip_overflow_check = IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm); 337 if (!skip_overflow_check) { 338 if (kExplicitStackOverflowCheck) { 339 SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM(); 340 AddSlowPath(slow_path); 341 342 __ LoadFromOffset(kLoadWord, IP, TR, Thread::StackEndOffset<kArmWordSize>().Int32Value()); 343 __ cmp(SP, ShifterOperand(IP)); 344 __ b(slow_path->GetEntryLabel(), CC); 345 } else { 346 __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm))); 347 __ LoadFromOffset(kLoadWord, IP, IP, 0); 348 RecordPcInfo(nullptr, 0); 349 } 350 } 351 352 core_spill_mask_ |= (1 << LR | 1 << R6 | 1 << R7); 353 __ PushList(1 << LR | 1 << R6 | 1 << R7); 354 355 // The return PC has already been pushed on the stack. 356 __ AddConstant(SP, -(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize)); 357 __ StoreToOffset(kStoreWord, R0, SP, 0); 358} 359 360void CodeGeneratorARM::GenerateFrameExit() { 361 __ AddConstant(SP, GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize); 362 __ PopList(1 << PC | 1 << R6 | 1 << R7); 363} 364 365void CodeGeneratorARM::Bind(HBasicBlock* block) { 366 __ Bind(GetLabelOf(block)); 367} 368 369Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const { 370 switch (load->GetType()) { 371 case Primitive::kPrimLong: 372 case Primitive::kPrimDouble: 373 return Location::DoubleStackSlot(GetStackSlot(load->GetLocal())); 374 break; 375 376 case Primitive::kPrimInt: 377 case Primitive::kPrimNot: 378 case Primitive::kPrimFloat: 379 return Location::StackSlot(GetStackSlot(load->GetLocal())); 380 381 case Primitive::kPrimBoolean: 382 case Primitive::kPrimByte: 383 case Primitive::kPrimChar: 384 case Primitive::kPrimShort: 385 case Primitive::kPrimVoid: 386 LOG(FATAL) << "Unexpected type " << load->GetType(); 387 } 388 389 LOG(FATAL) << "Unreachable"; 390 return Location(); 391} 392 393Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) { 394 switch (type) { 395 case Primitive::kPrimBoolean: 396 case Primitive::kPrimByte: 397 case Primitive::kPrimChar: 398 case Primitive::kPrimShort: 399 case Primitive::kPrimInt: 400 case Primitive::kPrimFloat: 401 case Primitive::kPrimNot: { 402 uint32_t index = gp_index_++; 403 if (index < calling_convention.GetNumberOfRegisters()) { 404 return Location::RegisterLocation(calling_convention.GetRegisterAt(index)); 405 } else { 406 return Location::StackSlot(calling_convention.GetStackOffsetOf(index)); 407 } 408 } 409 410 case Primitive::kPrimLong: 411 case Primitive::kPrimDouble: { 412 uint32_t index = gp_index_; 413 gp_index_ += 2; 414 if (index + 1 < calling_convention.GetNumberOfRegisters()) { 415 ArmManagedRegister pair = ArmManagedRegister::FromRegisterPair( 416 calling_convention.GetRegisterPairAt(index)); 417 return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh()); 418 } else if (index + 1 == calling_convention.GetNumberOfRegisters()) { 419 return Location::QuickParameter(index); 420 } else { 421 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(index)); 422 } 423 } 424 425 case Primitive::kPrimVoid: 426 LOG(FATAL) << "Unexpected parameter type " << type; 427 break; 428 } 429 return Location(); 430} 431 432void CodeGeneratorARM::Move32(Location destination, Location source) { 433 if (source.Equals(destination)) { 434 return; 435 } 436 if (destination.IsRegister()) { 437 if (source.IsRegister()) { 438 __ Mov(destination.As<Register>(), source.As<Register>()); 439 } else if (source.IsFpuRegister()) { 440 __ vmovrs(destination.As<Register>(), FromDToLowS(source.As<DRegister>())); 441 } else { 442 __ LoadFromOffset(kLoadWord, destination.As<Register>(), SP, source.GetStackIndex()); 443 } 444 } else if (destination.IsFpuRegister()) { 445 if (source.IsRegister()) { 446 __ vmovsr(FromDToLowS(destination.As<DRegister>()), source.As<Register>()); 447 } else if (source.IsFpuRegister()) { 448 __ vmovs(FromDToLowS(destination.As<DRegister>()), FromDToLowS(source.As<DRegister>())); 449 } else { 450 __ vldrs(FromDToLowS(destination.As<DRegister>()), Address(SP, source.GetStackIndex())); 451 } 452 } else { 453 DCHECK(destination.IsStackSlot()); 454 if (source.IsRegister()) { 455 __ StoreToOffset(kStoreWord, source.As<Register>(), SP, destination.GetStackIndex()); 456 } else if (source.IsFpuRegister()) { 457 __ vstrs(FromDToLowS(source.As<DRegister>()), Address(SP, destination.GetStackIndex())); 458 } else { 459 DCHECK(source.IsStackSlot()); 460 __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex()); 461 __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex()); 462 } 463 } 464} 465 466void CodeGeneratorARM::Move64(Location destination, Location source) { 467 if (source.Equals(destination)) { 468 return; 469 } 470 if (destination.IsRegisterPair()) { 471 if (source.IsRegisterPair()) { 472 __ Mov(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>()); 473 __ Mov(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>()); 474 } else if (source.IsFpuRegister()) { 475 LOG(FATAL) << "Unimplemented"; 476 } else if (source.IsQuickParameter()) { 477 uint32_t argument_index = source.GetQuickParameterIndex(); 478 InvokeDexCallingConvention calling_convention; 479 __ Mov(destination.AsRegisterPairLow<Register>(), 480 calling_convention.GetRegisterAt(argument_index)); 481 __ LoadFromOffset(kLoadWord, destination.AsRegisterPairHigh<Register>(), 482 SP, calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize()); 483 } else { 484 DCHECK(source.IsDoubleStackSlot()); 485 if (destination.AsRegisterPairLow<Register>() == R1) { 486 DCHECK_EQ(destination.AsRegisterPairHigh<Register>(), R2); 487 __ LoadFromOffset(kLoadWord, R1, SP, source.GetStackIndex()); 488 __ LoadFromOffset(kLoadWord, R2, SP, source.GetHighStackIndex(kArmWordSize)); 489 } else { 490 __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(), 491 SP, source.GetStackIndex()); 492 } 493 } 494 } else if (destination.IsFpuRegister()) { 495 if (source.IsDoubleStackSlot()) { 496 __ vldrd(destination.As<DRegister>(), Address(SP, source.GetStackIndex())); 497 } else { 498 LOG(FATAL) << "Unimplemented"; 499 } 500 } else if (destination.IsQuickParameter()) { 501 InvokeDexCallingConvention calling_convention; 502 uint32_t argument_index = destination.GetQuickParameterIndex(); 503 if (source.IsRegisterPair()) { 504 __ Mov(calling_convention.GetRegisterAt(argument_index), 505 source.AsRegisterPairLow<Register>()); 506 __ StoreToOffset(kStoreWord, source.AsRegisterPairHigh<Register>(), 507 SP, calling_convention.GetStackOffsetOf(argument_index + 1)); 508 } else if (source.IsFpuRegister()) { 509 LOG(FATAL) << "Unimplemented"; 510 } else { 511 DCHECK(source.IsDoubleStackSlot()); 512 __ LoadFromOffset(kLoadWord, calling_convention.GetRegisterAt(argument_index), SP, source.GetStackIndex()); 513 __ LoadFromOffset(kLoadWord, R0, SP, source.GetHighStackIndex(kArmWordSize)); 514 __ StoreToOffset(kStoreWord, R0, SP, calling_convention.GetStackOffsetOf(argument_index + 1)); 515 } 516 } else { 517 DCHECK(destination.IsDoubleStackSlot()); 518 if (source.IsRegisterPair()) { 519 if (source.AsRegisterPairLow<Register>() == R1) { 520 DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2); 521 __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex()); 522 __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize)); 523 } else { 524 __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(), 525 SP, destination.GetStackIndex()); 526 } 527 } else if (source.IsQuickParameter()) { 528 InvokeDexCallingConvention calling_convention; 529 uint32_t argument_index = source.GetQuickParameterIndex(); 530 __ StoreToOffset(kStoreWord, calling_convention.GetRegisterAt(argument_index), 531 SP, destination.GetStackIndex()); 532 __ LoadFromOffset(kLoadWord, R0, 533 SP, calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize()); 534 __ StoreToOffset(kStoreWord, R0, SP, destination.GetHighStackIndex(kArmWordSize)); 535 } else if (source.IsFpuRegister()) { 536 __ vstrd(source.As<DRegister>(), Address(SP, destination.GetStackIndex())); 537 } else { 538 DCHECK(source.IsDoubleStackSlot()); 539 __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex()); 540 __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex()); 541 __ LoadFromOffset(kLoadWord, IP, SP, source.GetHighStackIndex(kArmWordSize)); 542 __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize)); 543 } 544 } 545} 546 547void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) { 548 LocationSummary* locations = instruction->GetLocations(); 549 if (locations != nullptr && locations->Out().Equals(location)) { 550 return; 551 } 552 553 if (instruction->IsIntConstant()) { 554 int32_t value = instruction->AsIntConstant()->GetValue(); 555 if (location.IsRegister()) { 556 __ LoadImmediate(location.As<Register>(), value); 557 } else { 558 DCHECK(location.IsStackSlot()); 559 __ LoadImmediate(IP, value); 560 __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex()); 561 } 562 } else if (instruction->IsLongConstant()) { 563 int64_t value = instruction->AsLongConstant()->GetValue(); 564 if (location.IsRegisterPair()) { 565 __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value)); 566 __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value)); 567 } else { 568 DCHECK(location.IsDoubleStackSlot()); 569 __ LoadImmediate(IP, Low32Bits(value)); 570 __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex()); 571 __ LoadImmediate(IP, High32Bits(value)); 572 __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize)); 573 } 574 } else if (instruction->IsLoadLocal()) { 575 uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal()); 576 switch (instruction->GetType()) { 577 case Primitive::kPrimBoolean: 578 case Primitive::kPrimByte: 579 case Primitive::kPrimChar: 580 case Primitive::kPrimShort: 581 case Primitive::kPrimInt: 582 case Primitive::kPrimNot: 583 case Primitive::kPrimFloat: 584 Move32(location, Location::StackSlot(stack_slot)); 585 break; 586 587 case Primitive::kPrimLong: 588 case Primitive::kPrimDouble: 589 Move64(location, Location::DoubleStackSlot(stack_slot)); 590 break; 591 592 default: 593 LOG(FATAL) << "Unexpected type " << instruction->GetType(); 594 } 595 } else { 596 DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary()); 597 switch (instruction->GetType()) { 598 case Primitive::kPrimBoolean: 599 case Primitive::kPrimByte: 600 case Primitive::kPrimChar: 601 case Primitive::kPrimShort: 602 case Primitive::kPrimNot: 603 case Primitive::kPrimInt: 604 case Primitive::kPrimFloat: 605 Move32(location, locations->Out()); 606 break; 607 608 case Primitive::kPrimLong: 609 case Primitive::kPrimDouble: 610 Move64(location, locations->Out()); 611 break; 612 613 default: 614 LOG(FATAL) << "Unexpected type " << instruction->GetType(); 615 } 616 } 617} 618 619void LocationsBuilderARM::VisitGoto(HGoto* got) { 620 got->SetLocations(nullptr); 621} 622 623void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) { 624 HBasicBlock* successor = got->GetSuccessor(); 625 DCHECK(!successor->IsExitBlock()); 626 627 HBasicBlock* block = got->GetBlock(); 628 HInstruction* previous = got->GetPrevious(); 629 630 HLoopInformation* info = block->GetLoopInformation(); 631 if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) { 632 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck()); 633 GenerateSuspendCheck(info->GetSuspendCheck(), successor); 634 return; 635 } 636 637 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) { 638 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr); 639 } 640 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) { 641 __ b(codegen_->GetLabelOf(successor)); 642 } 643} 644 645void LocationsBuilderARM::VisitExit(HExit* exit) { 646 exit->SetLocations(nullptr); 647} 648 649void InstructionCodeGeneratorARM::VisitExit(HExit* exit) { 650 if (kIsDebugBuild) { 651 __ Comment("Unreachable"); 652 __ bkpt(0); 653 } 654} 655 656void LocationsBuilderARM::VisitIf(HIf* if_instr) { 657 LocationSummary* locations = 658 new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall); 659 HInstruction* cond = if_instr->InputAt(0); 660 if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) { 661 locations->SetInAt(0, Location::RequiresRegister()); 662 } 663} 664 665void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) { 666 HInstruction* cond = if_instr->InputAt(0); 667 if (cond->IsIntConstant()) { 668 // Constant condition, statically compared against 1. 669 int32_t cond_value = cond->AsIntConstant()->GetValue(); 670 if (cond_value == 1) { 671 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), 672 if_instr->IfTrueSuccessor())) { 673 __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor())); 674 } 675 return; 676 } else { 677 DCHECK_EQ(cond_value, 0); 678 } 679 } else { 680 if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) { 681 // Condition has been materialized, compare the output to 0 682 DCHECK(if_instr->GetLocations()->InAt(0).IsRegister()); 683 __ cmp(if_instr->GetLocations()->InAt(0).As<Register>(), 684 ShifterOperand(0)); 685 __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE); 686 } else { 687 // Condition has not been materialized, use its inputs as the 688 // comparison and its condition as the branch condition. 689 LocationSummary* locations = cond->GetLocations(); 690 if (locations->InAt(1).IsRegister()) { 691 __ cmp(locations->InAt(0).As<Register>(), 692 ShifterOperand(locations->InAt(1).As<Register>())); 693 } else { 694 DCHECK(locations->InAt(1).IsConstant()); 695 int32_t value = 696 locations->InAt(1).GetConstant()->AsIntConstant()->GetValue(); 697 ShifterOperand operand; 698 if (ShifterOperand::CanHoldArm(value, &operand)) { 699 __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value)); 700 } else { 701 Register temp = IP; 702 __ LoadImmediate(temp, value); 703 __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp)); 704 } 705 } 706 __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), 707 ARMCondition(cond->AsCondition()->GetCondition())); 708 } 709 } 710 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), 711 if_instr->IfFalseSuccessor())) { 712 __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor())); 713 } 714} 715 716 717void LocationsBuilderARM::VisitCondition(HCondition* comp) { 718 LocationSummary* locations = 719 new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall); 720 locations->SetInAt(0, Location::RequiresRegister()); 721 locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1))); 722 if (comp->NeedsMaterialization()) { 723 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 724 } 725} 726 727void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) { 728 if (!comp->NeedsMaterialization()) return; 729 730 LocationSummary* locations = comp->GetLocations(); 731 if (locations->InAt(1).IsRegister()) { 732 __ cmp(locations->InAt(0).As<Register>(), 733 ShifterOperand(locations->InAt(1).As<Register>())); 734 } else { 735 DCHECK(locations->InAt(1).IsConstant()); 736 int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue(); 737 ShifterOperand operand; 738 if (ShifterOperand::CanHoldArm(value, &operand)) { 739 __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value)); 740 } else { 741 Register temp = IP; 742 __ LoadImmediate(temp, value); 743 __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp)); 744 } 745 } 746 __ it(ARMCondition(comp->GetCondition()), kItElse); 747 __ mov(locations->Out().As<Register>(), ShifterOperand(1), 748 ARMCondition(comp->GetCondition())); 749 __ mov(locations->Out().As<Register>(), ShifterOperand(0), 750 ARMOppositeCondition(comp->GetCondition())); 751} 752 753void LocationsBuilderARM::VisitEqual(HEqual* comp) { 754 VisitCondition(comp); 755} 756 757void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) { 758 VisitCondition(comp); 759} 760 761void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) { 762 VisitCondition(comp); 763} 764 765void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) { 766 VisitCondition(comp); 767} 768 769void LocationsBuilderARM::VisitLessThan(HLessThan* comp) { 770 VisitCondition(comp); 771} 772 773void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) { 774 VisitCondition(comp); 775} 776 777void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) { 778 VisitCondition(comp); 779} 780 781void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) { 782 VisitCondition(comp); 783} 784 785void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) { 786 VisitCondition(comp); 787} 788 789void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) { 790 VisitCondition(comp); 791} 792 793void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) { 794 VisitCondition(comp); 795} 796 797void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) { 798 VisitCondition(comp); 799} 800 801void LocationsBuilderARM::VisitLocal(HLocal* local) { 802 local->SetLocations(nullptr); 803} 804 805void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) { 806 DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock()); 807} 808 809void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) { 810 load->SetLocations(nullptr); 811} 812 813void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) { 814 // Nothing to do, this is driven by the code generator. 815} 816 817void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) { 818 LocationSummary* locations = 819 new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall); 820 switch (store->InputAt(1)->GetType()) { 821 case Primitive::kPrimBoolean: 822 case Primitive::kPrimByte: 823 case Primitive::kPrimChar: 824 case Primitive::kPrimShort: 825 case Primitive::kPrimInt: 826 case Primitive::kPrimNot: 827 case Primitive::kPrimFloat: 828 locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal()))); 829 break; 830 831 case Primitive::kPrimLong: 832 case Primitive::kPrimDouble: 833 locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal()))); 834 break; 835 836 default: 837 LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType(); 838 } 839} 840 841void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) { 842} 843 844void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) { 845 LocationSummary* locations = 846 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall); 847 locations->SetOut(Location::ConstantLocation(constant)); 848} 849 850void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) { 851 // Will be generated at use site. 852} 853 854void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) { 855 LocationSummary* locations = 856 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall); 857 locations->SetOut(Location::ConstantLocation(constant)); 858} 859 860void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) { 861 // Will be generated at use site. 862} 863 864void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) { 865 LocationSummary* locations = 866 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall); 867 locations->SetOut(Location::ConstantLocation(constant)); 868} 869 870void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) { 871 // Will be generated at use site. 872} 873 874void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) { 875 LocationSummary* locations = 876 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall); 877 locations->SetOut(Location::ConstantLocation(constant)); 878} 879 880void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) { 881 // Will be generated at use site. 882} 883 884void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) { 885 ret->SetLocations(nullptr); 886} 887 888void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) { 889 codegen_->GenerateFrameExit(); 890} 891 892void LocationsBuilderARM::VisitReturn(HReturn* ret) { 893 LocationSummary* locations = 894 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall); 895 switch (ret->InputAt(0)->GetType()) { 896 case Primitive::kPrimBoolean: 897 case Primitive::kPrimByte: 898 case Primitive::kPrimChar: 899 case Primitive::kPrimShort: 900 case Primitive::kPrimInt: 901 case Primitive::kPrimNot: 902 case Primitive::kPrimFloat: 903 locations->SetInAt(0, Location::RegisterLocation(R0)); 904 break; 905 906 case Primitive::kPrimLong: 907 case Primitive::kPrimDouble: 908 locations->SetInAt(0, Location::RegisterPairLocation(R0, R1)); 909 break; 910 911 default: 912 LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType(); 913 } 914} 915 916void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) { 917 if (kIsDebugBuild) { 918 switch (ret->InputAt(0)->GetType()) { 919 case Primitive::kPrimBoolean: 920 case Primitive::kPrimByte: 921 case Primitive::kPrimChar: 922 case Primitive::kPrimShort: 923 case Primitive::kPrimInt: 924 case Primitive::kPrimNot: 925 case Primitive::kPrimFloat: 926 DCHECK_EQ(ret->GetLocations()->InAt(0).As<Register>(), R0); 927 break; 928 929 case Primitive::kPrimLong: 930 case Primitive::kPrimDouble: 931 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairLow<Register>(), R0); 932 DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairHigh<Register>(), R1); 933 break; 934 935 default: 936 LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType(); 937 } 938 } 939 codegen_->GenerateFrameExit(); 940} 941 942void LocationsBuilderARM::VisitInvokeStatic(HInvokeStatic* invoke) { 943 HandleInvoke(invoke); 944} 945 946void InstructionCodeGeneratorARM::LoadCurrentMethod(Register reg) { 947 __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset); 948} 949 950void InstructionCodeGeneratorARM::VisitInvokeStatic(HInvokeStatic* invoke) { 951 Register temp = invoke->GetLocations()->GetTemp(0).As<Register>(); 952 uint32_t heap_reference_size = sizeof(mirror::HeapReference<mirror::Object>); 953 size_t index_in_cache = mirror::Array::DataOffset(heap_reference_size).Int32Value() + 954 invoke->GetIndexInDexCache() * kArmWordSize; 955 956 // TODO: Implement all kinds of calls: 957 // 1) boot -> boot 958 // 2) app -> boot 959 // 3) app -> app 960 // 961 // Currently we implement the app -> app logic, which looks up in the resolve cache. 962 963 // temp = method; 964 LoadCurrentMethod(temp); 965 // temp = temp->dex_cache_resolved_methods_; 966 __ LoadFromOffset(kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()); 967 // temp = temp[index_in_cache] 968 __ LoadFromOffset(kLoadWord, temp, temp, index_in_cache); 969 // LR = temp[offset_of_quick_compiled_code] 970 __ LoadFromOffset(kLoadWord, LR, temp, 971 mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value()); 972 // LR() 973 __ blx(LR); 974 975 codegen_->RecordPcInfo(invoke, invoke->GetDexPc()); 976 DCHECK(!codegen_->IsLeafMethod()); 977} 978 979void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) { 980 HandleInvoke(invoke); 981} 982 983void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) { 984 LocationSummary* locations = 985 new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall); 986 locations->AddTemp(Location::RegisterLocation(R0)); 987 988 InvokeDexCallingConventionVisitor calling_convention_visitor; 989 for (size_t i = 0; i < invoke->InputCount(); i++) { 990 HInstruction* input = invoke->InputAt(i); 991 locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType())); 992 } 993 994 switch (invoke->GetType()) { 995 case Primitive::kPrimBoolean: 996 case Primitive::kPrimByte: 997 case Primitive::kPrimChar: 998 case Primitive::kPrimShort: 999 case Primitive::kPrimInt: 1000 case Primitive::kPrimNot: 1001 case Primitive::kPrimFloat: 1002 locations->SetOut(Location::RegisterLocation(R0)); 1003 break; 1004 1005 case Primitive::kPrimLong: 1006 case Primitive::kPrimDouble: 1007 locations->SetOut(Location::RegisterPairLocation(R0, R1)); 1008 break; 1009 1010 case Primitive::kPrimVoid: 1011 break; 1012 } 1013} 1014 1015 1016void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) { 1017 Register temp = invoke->GetLocations()->GetTemp(0).As<Register>(); 1018 uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() + 1019 invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry); 1020 LocationSummary* locations = invoke->GetLocations(); 1021 Location receiver = locations->InAt(0); 1022 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value(); 1023 // temp = object->GetClass(); 1024 if (receiver.IsStackSlot()) { 1025 __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex()); 1026 __ LoadFromOffset(kLoadWord, temp, temp, class_offset); 1027 } else { 1028 __ LoadFromOffset(kLoadWord, temp, receiver.As<Register>(), class_offset); 1029 } 1030 // temp = temp->GetMethodAt(method_offset); 1031 uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value(); 1032 __ LoadFromOffset(kLoadWord, temp, temp, method_offset); 1033 // LR = temp->GetEntryPoint(); 1034 __ LoadFromOffset(kLoadWord, LR, temp, entry_point); 1035 // LR(); 1036 __ blx(LR); 1037 DCHECK(!codegen_->IsLeafMethod()); 1038 codegen_->RecordPcInfo(invoke, invoke->GetDexPc()); 1039} 1040 1041void LocationsBuilderARM::VisitNeg(HNeg* neg) { 1042 LocationSummary* locations = 1043 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall); 1044 switch (neg->GetResultType()) { 1045 case Primitive::kPrimInt: 1046 case Primitive::kPrimLong: { 1047 bool output_overlaps = (neg->GetResultType() == Primitive::kPrimLong); 1048 locations->SetInAt(0, Location::RequiresRegister()); 1049 locations->SetOut(Location::RequiresRegister(), output_overlaps); 1050 break; 1051 } 1052 1053 case Primitive::kPrimFloat: 1054 case Primitive::kPrimDouble: 1055 LOG(FATAL) << "Not yet implemented neg type " << neg->GetResultType(); 1056 break; 1057 1058 default: 1059 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType(); 1060 } 1061} 1062 1063void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) { 1064 LocationSummary* locations = neg->GetLocations(); 1065 Location out = locations->Out(); 1066 Location in = locations->InAt(0); 1067 switch (neg->GetResultType()) { 1068 case Primitive::kPrimInt: 1069 DCHECK(in.IsRegister()); 1070 __ rsb(out.As<Register>(), in.As<Register>(), ShifterOperand(0)); 1071 break; 1072 1073 case Primitive::kPrimLong: 1074 DCHECK(in.IsRegisterPair()); 1075 __ rsbs(out.AsRegisterPairLow<Register>(), 1076 in.AsRegisterPairLow<Register>(), 1077 ShifterOperand(0)); 1078 __ rsc(out.AsRegisterPairHigh<Register>(), 1079 in.AsRegisterPairHigh<Register>(), 1080 ShifterOperand(0)); 1081 break; 1082 1083 case Primitive::kPrimFloat: 1084 case Primitive::kPrimDouble: 1085 LOG(FATAL) << "Not yet implemented neg type " << neg->GetResultType(); 1086 break; 1087 1088 default: 1089 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType(); 1090 } 1091} 1092 1093void LocationsBuilderARM::VisitAdd(HAdd* add) { 1094 LocationSummary* locations = 1095 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall); 1096 switch (add->GetResultType()) { 1097 case Primitive::kPrimInt: 1098 case Primitive::kPrimLong: { 1099 bool output_overlaps = (add->GetResultType() == Primitive::kPrimLong); 1100 locations->SetInAt(0, Location::RequiresRegister()); 1101 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1))); 1102 locations->SetOut(Location::RequiresRegister(), output_overlaps); 1103 break; 1104 } 1105 1106 case Primitive::kPrimFloat: 1107 case Primitive::kPrimDouble: { 1108 locations->SetInAt(0, Location::RequiresFpuRegister()); 1109 locations->SetInAt(1, Location::RequiresFpuRegister()); 1110 locations->SetOut(Location::RequiresFpuRegister()); 1111 break; 1112 } 1113 1114 default: 1115 LOG(FATAL) << "Unexpected add type " << add->GetResultType(); 1116 } 1117} 1118 1119void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) { 1120 LocationSummary* locations = add->GetLocations(); 1121 Location out = locations->Out(); 1122 Location first = locations->InAt(0); 1123 Location second = locations->InAt(1); 1124 switch (add->GetResultType()) { 1125 case Primitive::kPrimInt: 1126 if (second.IsRegister()) { 1127 __ add(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>())); 1128 } else { 1129 __ AddConstant(out.As<Register>(), 1130 first.As<Register>(), 1131 second.GetConstant()->AsIntConstant()->GetValue()); 1132 } 1133 break; 1134 1135 case Primitive::kPrimLong: 1136 __ adds(out.AsRegisterPairLow<Register>(), 1137 first.AsRegisterPairLow<Register>(), 1138 ShifterOperand(second.AsRegisterPairLow<Register>())); 1139 __ adc(out.AsRegisterPairHigh<Register>(), 1140 first.AsRegisterPairHigh<Register>(), 1141 ShifterOperand(second.AsRegisterPairHigh<Register>())); 1142 break; 1143 1144 case Primitive::kPrimFloat: 1145 __ vadds(FromDToLowS(out.As<DRegister>()), 1146 FromDToLowS(first.As<DRegister>()), 1147 FromDToLowS(second.As<DRegister>())); 1148 break; 1149 1150 case Primitive::kPrimDouble: 1151 __ vaddd(out.As<DRegister>(), first.As<DRegister>(), second.As<DRegister>()); 1152 break; 1153 1154 default: 1155 LOG(FATAL) << "Unexpected add type " << add->GetResultType(); 1156 } 1157} 1158 1159void LocationsBuilderARM::VisitSub(HSub* sub) { 1160 LocationSummary* locations = 1161 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall); 1162 switch (sub->GetResultType()) { 1163 case Primitive::kPrimInt: 1164 case Primitive::kPrimLong: { 1165 bool output_overlaps = (sub->GetResultType() == Primitive::kPrimLong); 1166 locations->SetInAt(0, Location::RequiresRegister()); 1167 locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1))); 1168 locations->SetOut(Location::RequiresRegister(), output_overlaps); 1169 break; 1170 } 1171 case Primitive::kPrimFloat: 1172 case Primitive::kPrimDouble: { 1173 locations->SetInAt(0, Location::RequiresFpuRegister()); 1174 locations->SetInAt(1, Location::RequiresFpuRegister()); 1175 locations->SetOut(Location::RequiresFpuRegister()); 1176 break; 1177 } 1178 default: 1179 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType(); 1180 } 1181} 1182 1183void InstructionCodeGeneratorARM::VisitSub(HSub* sub) { 1184 LocationSummary* locations = sub->GetLocations(); 1185 Location out = locations->Out(); 1186 Location first = locations->InAt(0); 1187 Location second = locations->InAt(1); 1188 switch (sub->GetResultType()) { 1189 case Primitive::kPrimInt: { 1190 if (second.IsRegister()) { 1191 __ sub(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>())); 1192 } else { 1193 __ AddConstant(out.As<Register>(), 1194 first.As<Register>(), 1195 -second.GetConstant()->AsIntConstant()->GetValue()); 1196 } 1197 break; 1198 } 1199 1200 case Primitive::kPrimLong: { 1201 __ subs(out.AsRegisterPairLow<Register>(), 1202 first.AsRegisterPairLow<Register>(), 1203 ShifterOperand(second.AsRegisterPairLow<Register>())); 1204 __ sbc(out.AsRegisterPairHigh<Register>(), 1205 first.AsRegisterPairHigh<Register>(), 1206 ShifterOperand(second.AsRegisterPairHigh<Register>())); 1207 break; 1208 } 1209 1210 case Primitive::kPrimFloat: { 1211 __ vsubs(FromDToLowS(out.As<DRegister>()), 1212 FromDToLowS(first.As<DRegister>()), 1213 FromDToLowS(second.As<DRegister>())); 1214 break; 1215 } 1216 1217 case Primitive::kPrimDouble: { 1218 __ vsubd(out.As<DRegister>(), first.As<DRegister>(), second.As<DRegister>()); 1219 break; 1220 } 1221 1222 1223 default: 1224 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType(); 1225 } 1226} 1227 1228void LocationsBuilderARM::VisitMul(HMul* mul) { 1229 LocationSummary* locations = 1230 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall); 1231 switch (mul->GetResultType()) { 1232 case Primitive::kPrimInt: 1233 case Primitive::kPrimLong: { 1234 locations->SetInAt(0, Location::RequiresRegister()); 1235 locations->SetInAt(1, Location::RequiresRegister()); 1236 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1237 break; 1238 } 1239 1240 case Primitive::kPrimFloat: 1241 case Primitive::kPrimDouble: { 1242 locations->SetInAt(0, Location::RequiresFpuRegister()); 1243 locations->SetInAt(1, Location::RequiresFpuRegister()); 1244 locations->SetOut(Location::RequiresFpuRegister()); 1245 break; 1246 } 1247 1248 default: 1249 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType(); 1250 } 1251} 1252 1253void InstructionCodeGeneratorARM::VisitMul(HMul* mul) { 1254 LocationSummary* locations = mul->GetLocations(); 1255 Location out = locations->Out(); 1256 Location first = locations->InAt(0); 1257 Location second = locations->InAt(1); 1258 switch (mul->GetResultType()) { 1259 case Primitive::kPrimInt: { 1260 __ mul(out.As<Register>(), first.As<Register>(), second.As<Register>()); 1261 break; 1262 } 1263 case Primitive::kPrimLong: { 1264 Register out_hi = out.AsRegisterPairHigh<Register>(); 1265 Register out_lo = out.AsRegisterPairLow<Register>(); 1266 Register in1_hi = first.AsRegisterPairHigh<Register>(); 1267 Register in1_lo = first.AsRegisterPairLow<Register>(); 1268 Register in2_hi = second.AsRegisterPairHigh<Register>(); 1269 Register in2_lo = second.AsRegisterPairLow<Register>(); 1270 1271 // Extra checks to protect caused by the existence of R1_R2. 1272 // The algorithm is wrong if out.hi is either in1.lo or in2.lo: 1273 // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2); 1274 DCHECK_NE(out_hi, in1_lo); 1275 DCHECK_NE(out_hi, in2_lo); 1276 1277 // input: in1 - 64 bits, in2 - 64 bits 1278 // output: out 1279 // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo 1280 // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32] 1281 // parts: out.lo = (in1.lo * in2.lo)[31:0] 1282 1283 // IP <- in1.lo * in2.hi 1284 __ mul(IP, in1_lo, in2_hi); 1285 // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo 1286 __ mla(out_hi, in1_hi, in2_lo, IP); 1287 // out.lo <- (in1.lo * in2.lo)[31:0]; 1288 __ umull(out_lo, IP, in1_lo, in2_lo); 1289 // out.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32] 1290 __ add(out_hi, out_hi, ShifterOperand(IP)); 1291 break; 1292 } 1293 1294 case Primitive::kPrimFloat: { 1295 __ vmuls(FromDToLowS(out.As<DRegister>()), 1296 FromDToLowS(first.As<DRegister>()), 1297 FromDToLowS(second.As<DRegister>())); 1298 break; 1299 } 1300 1301 case Primitive::kPrimDouble: { 1302 __ vmuld(out.As<DRegister>(), first.As<DRegister>(), second.As<DRegister>()); 1303 break; 1304 } 1305 1306 default: 1307 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType(); 1308 } 1309} 1310 1311void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) { 1312 LocationSummary* locations = 1313 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall); 1314 InvokeRuntimeCallingConvention calling_convention; 1315 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0))); 1316 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1))); 1317 locations->SetOut(Location::RegisterLocation(R0)); 1318} 1319 1320void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) { 1321 InvokeRuntimeCallingConvention calling_convention; 1322 LoadCurrentMethod(calling_convention.GetRegisterAt(1)); 1323 __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex()); 1324 1325 int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocObjectWithAccessCheck).Int32Value(); 1326 __ LoadFromOffset(kLoadWord, LR, TR, offset); 1327 __ blx(LR); 1328 1329 codegen_->RecordPcInfo(instruction, instruction->GetDexPc()); 1330 DCHECK(!codegen_->IsLeafMethod()); 1331} 1332 1333void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) { 1334 LocationSummary* locations = 1335 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall); 1336 InvokeRuntimeCallingConvention calling_convention; 1337 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0))); 1338 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1))); 1339 locations->SetOut(Location::RegisterLocation(R0)); 1340 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(2))); 1341} 1342 1343void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) { 1344 InvokeRuntimeCallingConvention calling_convention; 1345 LoadCurrentMethod(calling_convention.GetRegisterAt(1)); 1346 __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex()); 1347 1348 int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocArrayWithAccessCheck).Int32Value(); 1349 __ LoadFromOffset(kLoadWord, LR, TR, offset); 1350 __ blx(LR); 1351 1352 codegen_->RecordPcInfo(instruction, instruction->GetDexPc()); 1353 DCHECK(!codegen_->IsLeafMethod()); 1354} 1355 1356void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) { 1357 LocationSummary* locations = 1358 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1359 Location location = parameter_visitor_.GetNextLocation(instruction->GetType()); 1360 if (location.IsStackSlot()) { 1361 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize()); 1362 } else if (location.IsDoubleStackSlot()) { 1363 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize()); 1364 } 1365 locations->SetOut(location); 1366} 1367 1368void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) { 1369 // Nothing to do, the parameter is already at its location. 1370} 1371 1372void LocationsBuilderARM::VisitNot(HNot* not_) { 1373 LocationSummary* locations = 1374 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall); 1375 locations->SetInAt(0, Location::RequiresRegister()); 1376 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1377} 1378 1379void InstructionCodeGeneratorARM::VisitNot(HNot* not_) { 1380 LocationSummary* locations = not_->GetLocations(); 1381 Location out = locations->Out(); 1382 Location in = locations->InAt(0); 1383 switch (not_->InputAt(0)->GetType()) { 1384 case Primitive::kPrimBoolean: 1385 __ eor(out.As<Register>(), in.As<Register>(), ShifterOperand(1)); 1386 break; 1387 1388 case Primitive::kPrimInt: 1389 __ mvn(out.As<Register>(), ShifterOperand(in.As<Register>())); 1390 break; 1391 1392 case Primitive::kPrimLong: 1393 LOG(FATAL) << "Not yet implemented type for not operation " << not_->GetResultType(); 1394 break; 1395 1396 default: 1397 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType(); 1398 } 1399} 1400 1401void LocationsBuilderARM::VisitCompare(HCompare* compare) { 1402 LocationSummary* locations = 1403 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall); 1404 locations->SetInAt(0, Location::RequiresRegister()); 1405 locations->SetInAt(1, Location::RequiresRegister()); 1406 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1407} 1408 1409void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) { 1410 Label greater, done; 1411 LocationSummary* locations = compare->GetLocations(); 1412 switch (compare->InputAt(0)->GetType()) { 1413 case Primitive::kPrimLong: { 1414 Register output = locations->Out().As<Register>(); 1415 Location left = locations->InAt(0); 1416 Location right = locations->InAt(1); 1417 Label less, greater, done; 1418 __ cmp(left.AsRegisterPairHigh<Register>(), 1419 ShifterOperand(right.AsRegisterPairHigh<Register>())); // Signed compare. 1420 __ b(&less, LT); 1421 __ b(&greater, GT); 1422 // Do LoadImmediate before any `cmp`, as LoadImmediate might affect 1423 // the status flags. 1424 __ LoadImmediate(output, 0); 1425 __ cmp(left.AsRegisterPairLow<Register>(), 1426 ShifterOperand(right.AsRegisterPairLow<Register>())); // Unsigned compare. 1427 __ b(&done, EQ); 1428 __ b(&less, CC); 1429 1430 __ Bind(&greater); 1431 __ LoadImmediate(output, 1); 1432 __ b(&done); 1433 1434 __ Bind(&less); 1435 __ LoadImmediate(output, -1); 1436 1437 __ Bind(&done); 1438 break; 1439 } 1440 default: 1441 LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType(); 1442 } 1443} 1444 1445void LocationsBuilderARM::VisitPhi(HPhi* instruction) { 1446 LocationSummary* locations = 1447 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1448 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) { 1449 locations->SetInAt(i, Location::Any()); 1450 } 1451 locations->SetOut(Location::Any()); 1452} 1453 1454void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) { 1455 LOG(FATAL) << "Unreachable"; 1456} 1457 1458void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) { 1459 LocationSummary* locations = 1460 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1461 bool is_object_type = instruction->GetFieldType() == Primitive::kPrimNot; 1462 locations->SetInAt(0, Location::RequiresRegister()); 1463 locations->SetInAt(1, Location::RequiresRegister()); 1464 // Temporary registers for the write barrier. 1465 if (is_object_type) { 1466 locations->AddTemp(Location::RequiresRegister()); 1467 locations->AddTemp(Location::RequiresRegister()); 1468 } 1469} 1470 1471void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) { 1472 LocationSummary* locations = instruction->GetLocations(); 1473 Register obj = locations->InAt(0).As<Register>(); 1474 uint32_t offset = instruction->GetFieldOffset().Uint32Value(); 1475 Primitive::Type field_type = instruction->GetFieldType(); 1476 1477 switch (field_type) { 1478 case Primitive::kPrimBoolean: 1479 case Primitive::kPrimByte: { 1480 Register value = locations->InAt(1).As<Register>(); 1481 __ StoreToOffset(kStoreByte, value, obj, offset); 1482 break; 1483 } 1484 1485 case Primitive::kPrimShort: 1486 case Primitive::kPrimChar: { 1487 Register value = locations->InAt(1).As<Register>(); 1488 __ StoreToOffset(kStoreHalfword, value, obj, offset); 1489 break; 1490 } 1491 1492 case Primitive::kPrimInt: 1493 case Primitive::kPrimNot: { 1494 Register value = locations->InAt(1).As<Register>(); 1495 __ StoreToOffset(kStoreWord, value, obj, offset); 1496 if (field_type == Primitive::kPrimNot) { 1497 Register temp = locations->GetTemp(0).As<Register>(); 1498 Register card = locations->GetTemp(1).As<Register>(); 1499 codegen_->MarkGCCard(temp, card, obj, value); 1500 } 1501 break; 1502 } 1503 1504 case Primitive::kPrimLong: { 1505 Location value = locations->InAt(1); 1506 __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset); 1507 break; 1508 } 1509 1510 case Primitive::kPrimFloat: 1511 case Primitive::kPrimDouble: 1512 LOG(FATAL) << "Unimplemented register type " << field_type; 1513 UNREACHABLE(); 1514 case Primitive::kPrimVoid: 1515 LOG(FATAL) << "Unreachable type " << field_type; 1516 UNREACHABLE(); 1517 } 1518} 1519 1520void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) { 1521 LocationSummary* locations = 1522 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1523 locations->SetInAt(0, Location::RequiresRegister()); 1524 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1525} 1526 1527void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) { 1528 LocationSummary* locations = instruction->GetLocations(); 1529 Register obj = locations->InAt(0).As<Register>(); 1530 uint32_t offset = instruction->GetFieldOffset().Uint32Value(); 1531 1532 switch (instruction->GetType()) { 1533 case Primitive::kPrimBoolean: { 1534 Register out = locations->Out().As<Register>(); 1535 __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset); 1536 break; 1537 } 1538 1539 case Primitive::kPrimByte: { 1540 Register out = locations->Out().As<Register>(); 1541 __ LoadFromOffset(kLoadSignedByte, out, obj, offset); 1542 break; 1543 } 1544 1545 case Primitive::kPrimShort: { 1546 Register out = locations->Out().As<Register>(); 1547 __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset); 1548 break; 1549 } 1550 1551 case Primitive::kPrimChar: { 1552 Register out = locations->Out().As<Register>(); 1553 __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset); 1554 break; 1555 } 1556 1557 case Primitive::kPrimInt: 1558 case Primitive::kPrimNot: { 1559 Register out = locations->Out().As<Register>(); 1560 __ LoadFromOffset(kLoadWord, out, obj, offset); 1561 break; 1562 } 1563 1564 case Primitive::kPrimLong: { 1565 // TODO: support volatile. 1566 Location out = locations->Out(); 1567 __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset); 1568 break; 1569 } 1570 1571 case Primitive::kPrimFloat: 1572 case Primitive::kPrimDouble: 1573 LOG(FATAL) << "Unimplemented register type " << instruction->GetType(); 1574 UNREACHABLE(); 1575 case Primitive::kPrimVoid: 1576 LOG(FATAL) << "Unreachable type " << instruction->GetType(); 1577 UNREACHABLE(); 1578 } 1579} 1580 1581void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) { 1582 LocationSummary* locations = 1583 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1584 locations->SetInAt(0, Location::RequiresRegister()); 1585 if (instruction->HasUses()) { 1586 locations->SetOut(Location::SameAsFirstInput()); 1587 } 1588} 1589 1590void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) { 1591 SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction); 1592 codegen_->AddSlowPath(slow_path); 1593 1594 LocationSummary* locations = instruction->GetLocations(); 1595 Location obj = locations->InAt(0); 1596 1597 if (obj.IsRegister()) { 1598 __ cmp(obj.As<Register>(), ShifterOperand(0)); 1599 __ b(slow_path->GetEntryLabel(), EQ); 1600 } else { 1601 DCHECK(obj.IsConstant()) << obj; 1602 DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0); 1603 __ b(slow_path->GetEntryLabel()); 1604 } 1605} 1606 1607void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) { 1608 LocationSummary* locations = 1609 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1610 locations->SetInAt(0, Location::RequiresRegister()); 1611 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1))); 1612 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1613} 1614 1615void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) { 1616 LocationSummary* locations = instruction->GetLocations(); 1617 Register obj = locations->InAt(0).As<Register>(); 1618 Location index = locations->InAt(1); 1619 1620 switch (instruction->GetType()) { 1621 case Primitive::kPrimBoolean: { 1622 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value(); 1623 Register out = locations->Out().As<Register>(); 1624 if (index.IsConstant()) { 1625 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset; 1626 __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset); 1627 } else { 1628 __ add(IP, obj, ShifterOperand(index.As<Register>())); 1629 __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset); 1630 } 1631 break; 1632 } 1633 1634 case Primitive::kPrimByte: { 1635 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value(); 1636 Register out = locations->Out().As<Register>(); 1637 if (index.IsConstant()) { 1638 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset; 1639 __ LoadFromOffset(kLoadSignedByte, out, obj, offset); 1640 } else { 1641 __ add(IP, obj, ShifterOperand(index.As<Register>())); 1642 __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset); 1643 } 1644 break; 1645 } 1646 1647 case Primitive::kPrimShort: { 1648 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value(); 1649 Register out = locations->Out().As<Register>(); 1650 if (index.IsConstant()) { 1651 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset; 1652 __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset); 1653 } else { 1654 __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2)); 1655 __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset); 1656 } 1657 break; 1658 } 1659 1660 case Primitive::kPrimChar: { 1661 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value(); 1662 Register out = locations->Out().As<Register>(); 1663 if (index.IsConstant()) { 1664 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset; 1665 __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset); 1666 } else { 1667 __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2)); 1668 __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset); 1669 } 1670 break; 1671 } 1672 1673 case Primitive::kPrimInt: 1674 case Primitive::kPrimNot: { 1675 DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t)); 1676 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value(); 1677 Register out = locations->Out().As<Register>(); 1678 if (index.IsConstant()) { 1679 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset; 1680 __ LoadFromOffset(kLoadWord, out, obj, offset); 1681 } else { 1682 __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4)); 1683 __ LoadFromOffset(kLoadWord, out, IP, data_offset); 1684 } 1685 break; 1686 } 1687 1688 case Primitive::kPrimLong: { 1689 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value(); 1690 Location out = locations->Out(); 1691 if (index.IsConstant()) { 1692 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset; 1693 __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset); 1694 } else { 1695 __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8)); 1696 __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset); 1697 } 1698 break; 1699 } 1700 1701 case Primitive::kPrimFloat: 1702 case Primitive::kPrimDouble: 1703 LOG(FATAL) << "Unimplemented register type " << instruction->GetType(); 1704 UNREACHABLE(); 1705 case Primitive::kPrimVoid: 1706 LOG(FATAL) << "Unreachable type " << instruction->GetType(); 1707 UNREACHABLE(); 1708 } 1709} 1710 1711void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) { 1712 Primitive::Type value_type = instruction->GetComponentType(); 1713 bool is_object = value_type == Primitive::kPrimNot; 1714 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary( 1715 instruction, is_object ? LocationSummary::kCall : LocationSummary::kNoCall); 1716 if (is_object) { 1717 InvokeRuntimeCallingConvention calling_convention; 1718 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0))); 1719 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1))); 1720 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2))); 1721 } else { 1722 locations->SetInAt(0, Location::RequiresRegister()); 1723 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1))); 1724 locations->SetInAt(2, Location::RequiresRegister()); 1725 } 1726} 1727 1728void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) { 1729 LocationSummary* locations = instruction->GetLocations(); 1730 Register obj = locations->InAt(0).As<Register>(); 1731 Location index = locations->InAt(1); 1732 Primitive::Type value_type = instruction->GetComponentType(); 1733 1734 switch (value_type) { 1735 case Primitive::kPrimBoolean: 1736 case Primitive::kPrimByte: { 1737 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value(); 1738 Register value = locations->InAt(2).As<Register>(); 1739 if (index.IsConstant()) { 1740 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset; 1741 __ StoreToOffset(kStoreByte, value, obj, offset); 1742 } else { 1743 __ add(IP, obj, ShifterOperand(index.As<Register>())); 1744 __ StoreToOffset(kStoreByte, value, IP, data_offset); 1745 } 1746 break; 1747 } 1748 1749 case Primitive::kPrimShort: 1750 case Primitive::kPrimChar: { 1751 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value(); 1752 Register value = locations->InAt(2).As<Register>(); 1753 if (index.IsConstant()) { 1754 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset; 1755 __ StoreToOffset(kStoreHalfword, value, obj, offset); 1756 } else { 1757 __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2)); 1758 __ StoreToOffset(kStoreHalfword, value, IP, data_offset); 1759 } 1760 break; 1761 } 1762 1763 case Primitive::kPrimInt: { 1764 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value(); 1765 Register value = locations->InAt(2).As<Register>(); 1766 if (index.IsConstant()) { 1767 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset; 1768 __ StoreToOffset(kStoreWord, value, obj, offset); 1769 } else { 1770 __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4)); 1771 __ StoreToOffset(kStoreWord, value, IP, data_offset); 1772 } 1773 break; 1774 } 1775 1776 case Primitive::kPrimNot: { 1777 int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAputObject).Int32Value(); 1778 __ LoadFromOffset(kLoadWord, LR, TR, offset); 1779 __ blx(LR); 1780 codegen_->RecordPcInfo(instruction, instruction->GetDexPc()); 1781 DCHECK(!codegen_->IsLeafMethod()); 1782 break; 1783 } 1784 1785 case Primitive::kPrimLong: { 1786 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value(); 1787 Location value = locations->InAt(2); 1788 if (index.IsConstant()) { 1789 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset; 1790 __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset); 1791 } else { 1792 __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8)); 1793 __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset); 1794 } 1795 break; 1796 } 1797 1798 case Primitive::kPrimFloat: 1799 case Primitive::kPrimDouble: 1800 LOG(FATAL) << "Unimplemented register type " << instruction->GetType(); 1801 UNREACHABLE(); 1802 case Primitive::kPrimVoid: 1803 LOG(FATAL) << "Unreachable type " << instruction->GetType(); 1804 UNREACHABLE(); 1805 } 1806} 1807 1808void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) { 1809 LocationSummary* locations = 1810 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1811 locations->SetInAt(0, Location::RequiresRegister()); 1812 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1813} 1814 1815void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) { 1816 LocationSummary* locations = instruction->GetLocations(); 1817 uint32_t offset = mirror::Array::LengthOffset().Uint32Value(); 1818 Register obj = locations->InAt(0).As<Register>(); 1819 Register out = locations->Out().As<Register>(); 1820 __ LoadFromOffset(kLoadWord, out, obj, offset); 1821} 1822 1823void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) { 1824 LocationSummary* locations = 1825 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1826 locations->SetInAt(0, Location::RequiresRegister()); 1827 locations->SetInAt(1, Location::RequiresRegister()); 1828 if (instruction->HasUses()) { 1829 locations->SetOut(Location::SameAsFirstInput()); 1830 } 1831} 1832 1833void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) { 1834 LocationSummary* locations = instruction->GetLocations(); 1835 SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM( 1836 instruction, locations->InAt(0), locations->InAt(1)); 1837 codegen_->AddSlowPath(slow_path); 1838 1839 Register index = locations->InAt(0).As<Register>(); 1840 Register length = locations->InAt(1).As<Register>(); 1841 1842 __ cmp(index, ShifterOperand(length)); 1843 __ b(slow_path->GetEntryLabel(), CS); 1844} 1845 1846void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) { 1847 Label is_null; 1848 __ CompareAndBranchIfZero(value, &is_null); 1849 __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value()); 1850 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift); 1851 __ strb(card, Address(card, temp)); 1852 __ Bind(&is_null); 1853} 1854 1855void LocationsBuilderARM::VisitTemporary(HTemporary* temp) { 1856 temp->SetLocations(nullptr); 1857} 1858 1859void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) { 1860 // Nothing to do, this is driven by the code generator. 1861} 1862 1863void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) { 1864 LOG(FATAL) << "Unreachable"; 1865} 1866 1867void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) { 1868 codegen_->GetMoveResolver()->EmitNativeCode(instruction); 1869} 1870 1871void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) { 1872 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath); 1873} 1874 1875void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) { 1876 HBasicBlock* block = instruction->GetBlock(); 1877 if (block->GetLoopInformation() != nullptr) { 1878 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction); 1879 // The back edge will generate the suspend check. 1880 return; 1881 } 1882 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) { 1883 // The goto will generate the suspend check. 1884 return; 1885 } 1886 GenerateSuspendCheck(instruction, nullptr); 1887} 1888 1889void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction, 1890 HBasicBlock* successor) { 1891 SuspendCheckSlowPathARM* slow_path = 1892 new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor); 1893 codegen_->AddSlowPath(slow_path); 1894 1895 __ subs(R4, R4, ShifterOperand(1)); 1896 if (successor == nullptr) { 1897 __ b(slow_path->GetEntryLabel(), EQ); 1898 __ Bind(slow_path->GetReturnLabel()); 1899 } else { 1900 __ b(codegen_->GetLabelOf(successor), NE); 1901 __ b(slow_path->GetEntryLabel()); 1902 } 1903} 1904 1905ArmAssembler* ParallelMoveResolverARM::GetAssembler() const { 1906 return codegen_->GetAssembler(); 1907} 1908 1909void ParallelMoveResolverARM::EmitMove(size_t index) { 1910 MoveOperands* move = moves_.Get(index); 1911 Location source = move->GetSource(); 1912 Location destination = move->GetDestination(); 1913 1914 if (source.IsRegister()) { 1915 if (destination.IsRegister()) { 1916 __ Mov(destination.As<Register>(), source.As<Register>()); 1917 } else { 1918 DCHECK(destination.IsStackSlot()); 1919 __ StoreToOffset(kStoreWord, source.As<Register>(), 1920 SP, destination.GetStackIndex()); 1921 } 1922 } else if (source.IsStackSlot()) { 1923 if (destination.IsRegister()) { 1924 __ LoadFromOffset(kLoadWord, destination.As<Register>(), 1925 SP, source.GetStackIndex()); 1926 } else { 1927 DCHECK(destination.IsStackSlot()); 1928 __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex()); 1929 __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex()); 1930 } 1931 } else { 1932 DCHECK(source.IsConstant()); 1933 DCHECK(source.GetConstant()->IsIntConstant()); 1934 int32_t value = source.GetConstant()->AsIntConstant()->GetValue(); 1935 if (destination.IsRegister()) { 1936 __ LoadImmediate(destination.As<Register>(), value); 1937 } else { 1938 DCHECK(destination.IsStackSlot()); 1939 __ LoadImmediate(IP, value); 1940 __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex()); 1941 } 1942 } 1943} 1944 1945void ParallelMoveResolverARM::Exchange(Register reg, int mem) { 1946 __ Mov(IP, reg); 1947 __ LoadFromOffset(kLoadWord, reg, SP, mem); 1948 __ StoreToOffset(kStoreWord, IP, SP, mem); 1949} 1950 1951void ParallelMoveResolverARM::Exchange(int mem1, int mem2) { 1952 ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters()); 1953 int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0; 1954 __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()), 1955 SP, mem1 + stack_offset); 1956 __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset); 1957 __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()), 1958 SP, mem2 + stack_offset); 1959 __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset); 1960} 1961 1962void ParallelMoveResolverARM::EmitSwap(size_t index) { 1963 MoveOperands* move = moves_.Get(index); 1964 Location source = move->GetSource(); 1965 Location destination = move->GetDestination(); 1966 1967 if (source.IsRegister() && destination.IsRegister()) { 1968 DCHECK_NE(source.As<Register>(), IP); 1969 DCHECK_NE(destination.As<Register>(), IP); 1970 __ Mov(IP, source.As<Register>()); 1971 __ Mov(source.As<Register>(), destination.As<Register>()); 1972 __ Mov(destination.As<Register>(), IP); 1973 } else if (source.IsRegister() && destination.IsStackSlot()) { 1974 Exchange(source.As<Register>(), destination.GetStackIndex()); 1975 } else if (source.IsStackSlot() && destination.IsRegister()) { 1976 Exchange(destination.As<Register>(), source.GetStackIndex()); 1977 } else if (source.IsStackSlot() && destination.IsStackSlot()) { 1978 Exchange(source.GetStackIndex(), destination.GetStackIndex()); 1979 } else { 1980 LOG(FATAL) << "Unimplemented"; 1981 } 1982} 1983 1984void ParallelMoveResolverARM::SpillScratch(int reg) { 1985 __ Push(static_cast<Register>(reg)); 1986} 1987 1988void ParallelMoveResolverARM::RestoreScratch(int reg) { 1989 __ Pop(static_cast<Register>(reg)); 1990} 1991 1992} // namespace arm 1993} // namespace art 1994