code_generator_arm.cc revision 1ba0f596e9e4ddd778ab431237d11baa85594eba
1/* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#include "code_generator_arm.h" 18 19#include "entrypoints/quick/quick_entrypoints.h" 20#include "gc/accounting/card_table.h" 21#include "mirror/array-inl.h" 22#include "mirror/art_method.h" 23#include "mirror/class.h" 24#include "thread.h" 25#include "utils/assembler.h" 26#include "utils/arm/assembler_arm.h" 27#include "utils/arm/managed_register_arm.h" 28#include "utils/stack_checks.h" 29 30namespace art { 31 32namespace arm { 33 34static DRegister FromLowSToD(SRegister reg) { 35 DCHECK_EQ(reg % 2, 0); 36 return static_cast<DRegister>(reg / 2); 37} 38 39static constexpr bool kExplicitStackOverflowCheck = false; 40 41static constexpr int kNumberOfPushedRegistersAtEntry = 1 + 2; // LR, R6, R7 42static constexpr int kCurrentMethodStackOffset = 0; 43 44static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2 }; 45static constexpr size_t kRuntimeParameterCoreRegistersLength = 46 arraysize(kRuntimeParameterCoreRegisters); 47static constexpr SRegister kRuntimeParameterFpuRegisters[] = { }; 48static constexpr size_t kRuntimeParameterFpuRegistersLength = 0; 49 50class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> { 51 public: 52 InvokeRuntimeCallingConvention() 53 : CallingConvention(kRuntimeParameterCoreRegisters, 54 kRuntimeParameterCoreRegistersLength, 55 kRuntimeParameterFpuRegisters, 56 kRuntimeParameterFpuRegistersLength) {} 57 58 private: 59 DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention); 60}; 61 62#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())-> 63 64class SlowPathCodeARM : public SlowPathCode { 65 public: 66 SlowPathCodeARM() : entry_label_(), exit_label_() {} 67 68 Label* GetEntryLabel() { return &entry_label_; } 69 Label* GetExitLabel() { return &exit_label_; } 70 71 private: 72 Label entry_label_; 73 Label exit_label_; 74 75 DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM); 76}; 77 78class NullCheckSlowPathARM : public SlowPathCodeARM { 79 public: 80 explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {} 81 82 virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 83 __ Bind(GetEntryLabel()); 84 int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowNullPointer).Int32Value(); 85 __ LoadFromOffset(kLoadWord, LR, TR, offset); 86 __ blx(LR); 87 codegen->RecordPcInfo(instruction_, instruction_->GetDexPc()); 88 } 89 90 private: 91 HNullCheck* const instruction_; 92 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM); 93}; 94 95class StackOverflowCheckSlowPathARM : public SlowPathCodeARM { 96 public: 97 StackOverflowCheckSlowPathARM() {} 98 99 virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 100 __ Bind(GetEntryLabel()); 101 __ LoadFromOffset(kLoadWord, PC, TR, 102 QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowStackOverflow).Int32Value()); 103 } 104 105 private: 106 DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM); 107}; 108 109class SuspendCheckSlowPathARM : public SlowPathCodeARM { 110 public: 111 explicit SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor) 112 : instruction_(instruction), successor_(successor) {} 113 114 virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 115 CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen); 116 __ Bind(GetEntryLabel()); 117 codegen->SaveLiveRegisters(instruction_->GetLocations()); 118 int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pTestSuspend).Int32Value(); 119 __ LoadFromOffset(kLoadWord, LR, TR, offset); 120 __ blx(LR); 121 codegen->RecordPcInfo(instruction_, instruction_->GetDexPc()); 122 codegen->RestoreLiveRegisters(instruction_->GetLocations()); 123 if (successor_ == nullptr) { 124 __ b(GetReturnLabel()); 125 } else { 126 __ b(arm_codegen->GetLabelOf(successor_)); 127 } 128 } 129 130 Label* GetReturnLabel() { 131 DCHECK(successor_ == nullptr); 132 return &return_label_; 133 } 134 135 private: 136 HSuspendCheck* const instruction_; 137 // If not null, the block to branch to after the suspend check. 138 HBasicBlock* const successor_; 139 140 // If `successor_` is null, the label to branch to after the suspend check. 141 Label return_label_; 142 143 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM); 144}; 145 146class BoundsCheckSlowPathARM : public SlowPathCodeARM { 147 public: 148 BoundsCheckSlowPathARM(HBoundsCheck* instruction, 149 Location index_location, 150 Location length_location) 151 : instruction_(instruction), 152 index_location_(index_location), 153 length_location_(length_location) {} 154 155 virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 156 CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen); 157 __ Bind(GetEntryLabel()); 158 InvokeRuntimeCallingConvention calling_convention; 159 arm_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), index_location_); 160 arm_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(1)), length_location_); 161 int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowArrayBounds).Int32Value(); 162 __ LoadFromOffset(kLoadWord, LR, TR, offset); 163 __ blx(LR); 164 codegen->RecordPcInfo(instruction_, instruction_->GetDexPc()); 165 } 166 167 private: 168 HBoundsCheck* const instruction_; 169 const Location index_location_; 170 const Location length_location_; 171 172 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM); 173}; 174 175#undef __ 176#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())-> 177 178inline Condition ARMCondition(IfCondition cond) { 179 switch (cond) { 180 case kCondEQ: return EQ; 181 case kCondNE: return NE; 182 case kCondLT: return LT; 183 case kCondLE: return LE; 184 case kCondGT: return GT; 185 case kCondGE: return GE; 186 default: 187 LOG(FATAL) << "Unknown if condition"; 188 } 189 return EQ; // Unreachable. 190} 191 192inline Condition ARMOppositeCondition(IfCondition cond) { 193 switch (cond) { 194 case kCondEQ: return NE; 195 case kCondNE: return EQ; 196 case kCondLT: return GE; 197 case kCondLE: return GT; 198 case kCondGT: return LE; 199 case kCondGE: return LT; 200 default: 201 LOG(FATAL) << "Unknown if condition"; 202 } 203 return EQ; // Unreachable. 204} 205 206void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const { 207 stream << ArmManagedRegister::FromCoreRegister(Register(reg)); 208} 209 210void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const { 211 stream << ArmManagedRegister::FromSRegister(SRegister(reg)); 212} 213 214size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) { 215 __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index); 216 return kArmWordSize; 217} 218 219size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) { 220 __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index); 221 return kArmWordSize; 222} 223 224CodeGeneratorARM::CodeGeneratorARM(HGraph* graph) 225 : CodeGenerator(graph, kNumberOfCoreRegisters, kNumberOfSRegisters, kNumberOfRegisterPairs), 226 block_labels_(graph->GetArena(), 0), 227 location_builder_(graph, this), 228 instruction_visitor_(graph, this), 229 move_resolver_(graph->GetArena(), this), 230 assembler_(true) {} 231 232size_t CodeGeneratorARM::FrameEntrySpillSize() const { 233 return kNumberOfPushedRegistersAtEntry * kArmWordSize; 234} 235 236Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const { 237 switch (type) { 238 case Primitive::kPrimLong: { 239 size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs); 240 ArmManagedRegister pair = 241 ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg)); 242 DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]); 243 DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]); 244 245 blocked_core_registers_[pair.AsRegisterPairLow()] = true; 246 blocked_core_registers_[pair.AsRegisterPairHigh()] = true; 247 UpdateBlockedPairRegisters(); 248 return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh()); 249 } 250 251 case Primitive::kPrimByte: 252 case Primitive::kPrimBoolean: 253 case Primitive::kPrimChar: 254 case Primitive::kPrimShort: 255 case Primitive::kPrimInt: 256 case Primitive::kPrimNot: { 257 int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters); 258 // Block all register pairs that contain `reg`. 259 for (int i = 0; i < kNumberOfRegisterPairs; i++) { 260 ArmManagedRegister current = 261 ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i)); 262 if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) { 263 blocked_register_pairs_[i] = true; 264 } 265 } 266 return Location::RegisterLocation(reg); 267 } 268 269 case Primitive::kPrimFloat: { 270 int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters); 271 return Location::FpuRegisterLocation(reg); 272 } 273 274 case Primitive::kPrimDouble: { 275 int reg = FindTwoFreeConsecutiveEntries(blocked_fpu_registers_, kNumberOfSRegisters); 276 return Location::FpuRegisterPairLocation(reg, reg + 1); 277 } 278 279 case Primitive::kPrimVoid: 280 LOG(FATAL) << "Unreachable type " << type; 281 } 282 283 return Location(); 284} 285 286void CodeGeneratorARM::SetupBlockedRegisters() const { 287 // Don't allocate the dalvik style register pair passing. 288 blocked_register_pairs_[R1_R2] = true; 289 290 // Stack register, LR and PC are always reserved. 291 blocked_core_registers_[SP] = true; 292 blocked_core_registers_[LR] = true; 293 blocked_core_registers_[PC] = true; 294 295 // Reserve R4 for suspend check. 296 blocked_core_registers_[R4] = true; 297 298 // Reserve thread register. 299 blocked_core_registers_[TR] = true; 300 301 // Reserve temp register. 302 blocked_core_registers_[IP] = true; 303 304 // TODO: We currently don't use Quick's callee saved registers. 305 // We always save and restore R6 and R7 to make sure we can use three 306 // register pairs for long operations. 307 blocked_core_registers_[R5] = true; 308 blocked_core_registers_[R8] = true; 309 blocked_core_registers_[R10] = true; 310 blocked_core_registers_[R11] = true; 311 312 blocked_fpu_registers_[S16] = true; 313 blocked_fpu_registers_[S17] = true; 314 blocked_fpu_registers_[S18] = true; 315 blocked_fpu_registers_[S19] = true; 316 blocked_fpu_registers_[S20] = true; 317 blocked_fpu_registers_[S21] = true; 318 blocked_fpu_registers_[S22] = true; 319 blocked_fpu_registers_[S23] = true; 320 321 UpdateBlockedPairRegisters(); 322} 323 324void CodeGeneratorARM::UpdateBlockedPairRegisters() const { 325 for (int i = 0; i < kNumberOfRegisterPairs; i++) { 326 ArmManagedRegister current = 327 ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i)); 328 if (blocked_core_registers_[current.AsRegisterPairLow()] 329 || blocked_core_registers_[current.AsRegisterPairHigh()]) { 330 blocked_register_pairs_[i] = true; 331 } 332 } 333} 334 335InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen) 336 : HGraphVisitor(graph), 337 assembler_(codegen->GetAssembler()), 338 codegen_(codegen) {} 339 340void CodeGeneratorARM::GenerateFrameEntry() { 341 bool skip_overflow_check = IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm); 342 if (!skip_overflow_check) { 343 if (kExplicitStackOverflowCheck) { 344 SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM(); 345 AddSlowPath(slow_path); 346 347 __ LoadFromOffset(kLoadWord, IP, TR, Thread::StackEndOffset<kArmWordSize>().Int32Value()); 348 __ cmp(SP, ShifterOperand(IP)); 349 __ b(slow_path->GetEntryLabel(), CC); 350 } else { 351 __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm))); 352 __ LoadFromOffset(kLoadWord, IP, IP, 0); 353 RecordPcInfo(nullptr, 0); 354 } 355 } 356 357 core_spill_mask_ |= (1 << LR | 1 << R6 | 1 << R7); 358 __ PushList(1 << LR | 1 << R6 | 1 << R7); 359 360 // The return PC has already been pushed on the stack. 361 __ AddConstant(SP, -(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize)); 362 __ StoreToOffset(kStoreWord, R0, SP, 0); 363} 364 365void CodeGeneratorARM::GenerateFrameExit() { 366 __ AddConstant(SP, GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize); 367 __ PopList(1 << PC | 1 << R6 | 1 << R7); 368} 369 370void CodeGeneratorARM::Bind(HBasicBlock* block) { 371 __ Bind(GetLabelOf(block)); 372} 373 374Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const { 375 switch (load->GetType()) { 376 case Primitive::kPrimLong: 377 case Primitive::kPrimDouble: 378 return Location::DoubleStackSlot(GetStackSlot(load->GetLocal())); 379 break; 380 381 case Primitive::kPrimInt: 382 case Primitive::kPrimNot: 383 case Primitive::kPrimFloat: 384 return Location::StackSlot(GetStackSlot(load->GetLocal())); 385 386 case Primitive::kPrimBoolean: 387 case Primitive::kPrimByte: 388 case Primitive::kPrimChar: 389 case Primitive::kPrimShort: 390 case Primitive::kPrimVoid: 391 LOG(FATAL) << "Unexpected type " << load->GetType(); 392 } 393 394 LOG(FATAL) << "Unreachable"; 395 return Location(); 396} 397 398Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) { 399 switch (type) { 400 case Primitive::kPrimBoolean: 401 case Primitive::kPrimByte: 402 case Primitive::kPrimChar: 403 case Primitive::kPrimShort: 404 case Primitive::kPrimInt: 405 case Primitive::kPrimNot: { 406 uint32_t index = gp_index_++; 407 uint32_t stack_index = stack_index_++; 408 if (index < calling_convention.GetNumberOfRegisters()) { 409 return Location::RegisterLocation(calling_convention.GetRegisterAt(index)); 410 } else { 411 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index)); 412 } 413 } 414 415 case Primitive::kPrimLong: { 416 uint32_t index = gp_index_; 417 uint32_t stack_index = stack_index_; 418 gp_index_ += 2; 419 stack_index_ += 2; 420 if (index + 1 < calling_convention.GetNumberOfRegisters()) { 421 ArmManagedRegister pair = ArmManagedRegister::FromRegisterPair( 422 calling_convention.GetRegisterPairAt(index)); 423 return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh()); 424 } else if (index + 1 == calling_convention.GetNumberOfRegisters()) { 425 return Location::QuickParameter(stack_index); 426 } else { 427 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index)); 428 } 429 } 430 431 case Primitive::kPrimFloat: { 432 uint32_t stack_index = stack_index_++; 433 if (float_index_ % 2 == 0) { 434 float_index_ = std::max(double_index_, float_index_); 435 } 436 if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) { 437 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++)); 438 } else { 439 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index)); 440 } 441 } 442 443 case Primitive::kPrimDouble: { 444 double_index_ = std::max(double_index_, RoundUp(float_index_, 2)); 445 uint32_t stack_index = stack_index_; 446 stack_index_ += 2; 447 if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) { 448 uint32_t index = double_index_; 449 double_index_ += 2; 450 return Location::FpuRegisterPairLocation( 451 calling_convention.GetFpuRegisterAt(index), 452 calling_convention.GetFpuRegisterAt(index + 1)); 453 } else { 454 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index)); 455 } 456 } 457 458 case Primitive::kPrimVoid: 459 LOG(FATAL) << "Unexpected parameter type " << type; 460 break; 461 } 462 return Location(); 463} 464 465Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) { 466 switch (type) { 467 case Primitive::kPrimBoolean: 468 case Primitive::kPrimByte: 469 case Primitive::kPrimChar: 470 case Primitive::kPrimShort: 471 case Primitive::kPrimInt: 472 case Primitive::kPrimNot: { 473 return Location::RegisterLocation(R0); 474 } 475 476 case Primitive::kPrimFloat: { 477 return Location::FpuRegisterLocation(S0); 478 } 479 480 case Primitive::kPrimLong: { 481 return Location::RegisterPairLocation(R0, R1); 482 } 483 484 case Primitive::kPrimDouble: { 485 return Location::FpuRegisterPairLocation(S0, S1); 486 } 487 488 case Primitive::kPrimVoid: 489 return Location(); 490 } 491 UNREACHABLE(); 492 return Location(); 493} 494 495void CodeGeneratorARM::Move32(Location destination, Location source) { 496 if (source.Equals(destination)) { 497 return; 498 } 499 if (destination.IsRegister()) { 500 if (source.IsRegister()) { 501 __ Mov(destination.As<Register>(), source.As<Register>()); 502 } else if (source.IsFpuRegister()) { 503 __ vmovrs(destination.As<Register>(), source.As<SRegister>()); 504 } else { 505 __ LoadFromOffset(kLoadWord, destination.As<Register>(), SP, source.GetStackIndex()); 506 } 507 } else if (destination.IsFpuRegister()) { 508 if (source.IsRegister()) { 509 __ vmovsr(destination.As<SRegister>(), source.As<Register>()); 510 } else if (source.IsFpuRegister()) { 511 __ vmovs(destination.As<SRegister>(), source.As<SRegister>()); 512 } else { 513 __ LoadSFromOffset(destination.As<SRegister>(), SP, source.GetStackIndex()); 514 } 515 } else { 516 DCHECK(destination.IsStackSlot()); 517 if (source.IsRegister()) { 518 __ StoreToOffset(kStoreWord, source.As<Register>(), SP, destination.GetStackIndex()); 519 } else if (source.IsFpuRegister()) { 520 __ StoreSToOffset(source.As<SRegister>(), SP, destination.GetStackIndex()); 521 } else { 522 DCHECK(source.IsStackSlot()); 523 __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex()); 524 __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex()); 525 } 526 } 527} 528 529void CodeGeneratorARM::Move64(Location destination, Location source) { 530 if (source.Equals(destination)) { 531 return; 532 } 533 if (destination.IsRegisterPair()) { 534 if (source.IsRegisterPair()) { 535 __ Mov(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>()); 536 __ Mov(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>()); 537 } else if (source.IsFpuRegister()) { 538 UNIMPLEMENTED(FATAL); 539 } else if (source.IsQuickParameter()) { 540 uint32_t argument_index = source.GetQuickParameterIndex(); 541 InvokeDexCallingConvention calling_convention; 542 __ Mov(destination.AsRegisterPairLow<Register>(), 543 calling_convention.GetRegisterAt(argument_index)); 544 __ LoadFromOffset(kLoadWord, destination.AsRegisterPairHigh<Register>(), 545 SP, calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize()); 546 } else { 547 DCHECK(source.IsDoubleStackSlot()); 548 if (destination.AsRegisterPairLow<Register>() == R1) { 549 DCHECK_EQ(destination.AsRegisterPairHigh<Register>(), R2); 550 __ LoadFromOffset(kLoadWord, R1, SP, source.GetStackIndex()); 551 __ LoadFromOffset(kLoadWord, R2, SP, source.GetHighStackIndex(kArmWordSize)); 552 } else { 553 __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(), 554 SP, source.GetStackIndex()); 555 } 556 } 557 } else if (destination.IsFpuRegisterPair()) { 558 if (source.IsDoubleStackSlot()) { 559 __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()), 560 SP, 561 source.GetStackIndex()); 562 } else { 563 UNIMPLEMENTED(FATAL); 564 } 565 } else if (destination.IsQuickParameter()) { 566 InvokeDexCallingConvention calling_convention; 567 uint32_t argument_index = destination.GetQuickParameterIndex(); 568 if (source.IsRegisterPair()) { 569 __ Mov(calling_convention.GetRegisterAt(argument_index), 570 source.AsRegisterPairLow<Register>()); 571 __ StoreToOffset(kStoreWord, source.AsRegisterPairHigh<Register>(), 572 SP, calling_convention.GetStackOffsetOf(argument_index + 1)); 573 } else if (source.IsFpuRegister()) { 574 UNIMPLEMENTED(FATAL); 575 } else { 576 DCHECK(source.IsDoubleStackSlot()); 577 __ LoadFromOffset( 578 kLoadWord, calling_convention.GetRegisterAt(argument_index), SP, source.GetStackIndex()); 579 __ LoadFromOffset(kLoadWord, R0, SP, source.GetHighStackIndex(kArmWordSize)); 580 __ StoreToOffset(kStoreWord, R0, SP, calling_convention.GetStackOffsetOf(argument_index + 1)); 581 } 582 } else { 583 DCHECK(destination.IsDoubleStackSlot()); 584 if (source.IsRegisterPair()) { 585 if (source.AsRegisterPairLow<Register>() == R1) { 586 DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2); 587 __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex()); 588 __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize)); 589 } else { 590 __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(), 591 SP, destination.GetStackIndex()); 592 } 593 } else if (source.IsQuickParameter()) { 594 InvokeDexCallingConvention calling_convention; 595 uint32_t argument_index = source.GetQuickParameterIndex(); 596 __ StoreToOffset(kStoreWord, calling_convention.GetRegisterAt(argument_index), 597 SP, destination.GetStackIndex()); 598 __ LoadFromOffset(kLoadWord, R0, 599 SP, calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize()); 600 __ StoreToOffset(kStoreWord, R0, SP, destination.GetHighStackIndex(kArmWordSize)); 601 } else if (source.IsFpuRegisterPair()) { 602 __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()), 603 SP, 604 destination.GetStackIndex()); 605 } else { 606 DCHECK(source.IsDoubleStackSlot()); 607 __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex()); 608 __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex()); 609 __ LoadFromOffset(kLoadWord, IP, SP, source.GetHighStackIndex(kArmWordSize)); 610 __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize)); 611 } 612 } 613} 614 615void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) { 616 LocationSummary* locations = instruction->GetLocations(); 617 if (locations != nullptr && locations->Out().Equals(location)) { 618 return; 619 } 620 621 if (instruction->IsIntConstant()) { 622 int32_t value = instruction->AsIntConstant()->GetValue(); 623 if (location.IsRegister()) { 624 __ LoadImmediate(location.As<Register>(), value); 625 } else { 626 DCHECK(location.IsStackSlot()); 627 __ LoadImmediate(IP, value); 628 __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex()); 629 } 630 } else if (instruction->IsLongConstant()) { 631 int64_t value = instruction->AsLongConstant()->GetValue(); 632 if (location.IsRegisterPair()) { 633 __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value)); 634 __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value)); 635 } else { 636 DCHECK(location.IsDoubleStackSlot()); 637 __ LoadImmediate(IP, Low32Bits(value)); 638 __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex()); 639 __ LoadImmediate(IP, High32Bits(value)); 640 __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize)); 641 } 642 } else if (instruction->IsLoadLocal()) { 643 uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal()); 644 switch (instruction->GetType()) { 645 case Primitive::kPrimBoolean: 646 case Primitive::kPrimByte: 647 case Primitive::kPrimChar: 648 case Primitive::kPrimShort: 649 case Primitive::kPrimInt: 650 case Primitive::kPrimNot: 651 case Primitive::kPrimFloat: 652 Move32(location, Location::StackSlot(stack_slot)); 653 break; 654 655 case Primitive::kPrimLong: 656 case Primitive::kPrimDouble: 657 Move64(location, Location::DoubleStackSlot(stack_slot)); 658 break; 659 660 default: 661 LOG(FATAL) << "Unexpected type " << instruction->GetType(); 662 } 663 } else { 664 DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary()); 665 switch (instruction->GetType()) { 666 case Primitive::kPrimBoolean: 667 case Primitive::kPrimByte: 668 case Primitive::kPrimChar: 669 case Primitive::kPrimShort: 670 case Primitive::kPrimNot: 671 case Primitive::kPrimInt: 672 case Primitive::kPrimFloat: 673 Move32(location, locations->Out()); 674 break; 675 676 case Primitive::kPrimLong: 677 case Primitive::kPrimDouble: 678 Move64(location, locations->Out()); 679 break; 680 681 default: 682 LOG(FATAL) << "Unexpected type " << instruction->GetType(); 683 } 684 } 685} 686 687void LocationsBuilderARM::VisitGoto(HGoto* got) { 688 got->SetLocations(nullptr); 689} 690 691void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) { 692 HBasicBlock* successor = got->GetSuccessor(); 693 DCHECK(!successor->IsExitBlock()); 694 695 HBasicBlock* block = got->GetBlock(); 696 HInstruction* previous = got->GetPrevious(); 697 698 HLoopInformation* info = block->GetLoopInformation(); 699 if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) { 700 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck()); 701 GenerateSuspendCheck(info->GetSuspendCheck(), successor); 702 return; 703 } 704 705 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) { 706 GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr); 707 } 708 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) { 709 __ b(codegen_->GetLabelOf(successor)); 710 } 711} 712 713void LocationsBuilderARM::VisitExit(HExit* exit) { 714 exit->SetLocations(nullptr); 715} 716 717void InstructionCodeGeneratorARM::VisitExit(HExit* exit) { 718 if (kIsDebugBuild) { 719 __ Comment("Unreachable"); 720 __ bkpt(0); 721 } 722} 723 724void LocationsBuilderARM::VisitIf(HIf* if_instr) { 725 LocationSummary* locations = 726 new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall); 727 HInstruction* cond = if_instr->InputAt(0); 728 if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) { 729 locations->SetInAt(0, Location::RequiresRegister()); 730 } 731} 732 733void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) { 734 HInstruction* cond = if_instr->InputAt(0); 735 if (cond->IsIntConstant()) { 736 // Constant condition, statically compared against 1. 737 int32_t cond_value = cond->AsIntConstant()->GetValue(); 738 if (cond_value == 1) { 739 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), 740 if_instr->IfTrueSuccessor())) { 741 __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor())); 742 } 743 return; 744 } else { 745 DCHECK_EQ(cond_value, 0); 746 } 747 } else { 748 if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) { 749 // Condition has been materialized, compare the output to 0 750 DCHECK(if_instr->GetLocations()->InAt(0).IsRegister()); 751 __ cmp(if_instr->GetLocations()->InAt(0).As<Register>(), 752 ShifterOperand(0)); 753 __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE); 754 } else { 755 // Condition has not been materialized, use its inputs as the 756 // comparison and its condition as the branch condition. 757 LocationSummary* locations = cond->GetLocations(); 758 if (locations->InAt(1).IsRegister()) { 759 __ cmp(locations->InAt(0).As<Register>(), 760 ShifterOperand(locations->InAt(1).As<Register>())); 761 } else { 762 DCHECK(locations->InAt(1).IsConstant()); 763 int32_t value = 764 locations->InAt(1).GetConstant()->AsIntConstant()->GetValue(); 765 ShifterOperand operand; 766 if (ShifterOperand::CanHoldArm(value, &operand)) { 767 __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value)); 768 } else { 769 Register temp = IP; 770 __ LoadImmediate(temp, value); 771 __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp)); 772 } 773 } 774 __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), 775 ARMCondition(cond->AsCondition()->GetCondition())); 776 } 777 } 778 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), 779 if_instr->IfFalseSuccessor())) { 780 __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor())); 781 } 782} 783 784 785void LocationsBuilderARM::VisitCondition(HCondition* comp) { 786 LocationSummary* locations = 787 new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall); 788 locations->SetInAt(0, Location::RequiresRegister()); 789 locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1))); 790 if (comp->NeedsMaterialization()) { 791 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 792 } 793} 794 795void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) { 796 if (!comp->NeedsMaterialization()) return; 797 798 LocationSummary* locations = comp->GetLocations(); 799 if (locations->InAt(1).IsRegister()) { 800 __ cmp(locations->InAt(0).As<Register>(), 801 ShifterOperand(locations->InAt(1).As<Register>())); 802 } else { 803 DCHECK(locations->InAt(1).IsConstant()); 804 int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue(); 805 ShifterOperand operand; 806 if (ShifterOperand::CanHoldArm(value, &operand)) { 807 __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value)); 808 } else { 809 Register temp = IP; 810 __ LoadImmediate(temp, value); 811 __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp)); 812 } 813 } 814 __ it(ARMCondition(comp->GetCondition()), kItElse); 815 __ mov(locations->Out().As<Register>(), ShifterOperand(1), 816 ARMCondition(comp->GetCondition())); 817 __ mov(locations->Out().As<Register>(), ShifterOperand(0), 818 ARMOppositeCondition(comp->GetCondition())); 819} 820 821void LocationsBuilderARM::VisitEqual(HEqual* comp) { 822 VisitCondition(comp); 823} 824 825void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) { 826 VisitCondition(comp); 827} 828 829void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) { 830 VisitCondition(comp); 831} 832 833void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) { 834 VisitCondition(comp); 835} 836 837void LocationsBuilderARM::VisitLessThan(HLessThan* comp) { 838 VisitCondition(comp); 839} 840 841void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) { 842 VisitCondition(comp); 843} 844 845void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) { 846 VisitCondition(comp); 847} 848 849void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) { 850 VisitCondition(comp); 851} 852 853void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) { 854 VisitCondition(comp); 855} 856 857void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) { 858 VisitCondition(comp); 859} 860 861void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) { 862 VisitCondition(comp); 863} 864 865void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) { 866 VisitCondition(comp); 867} 868 869void LocationsBuilderARM::VisitLocal(HLocal* local) { 870 local->SetLocations(nullptr); 871} 872 873void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) { 874 DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock()); 875} 876 877void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) { 878 load->SetLocations(nullptr); 879} 880 881void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) { 882 // Nothing to do, this is driven by the code generator. 883} 884 885void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) { 886 LocationSummary* locations = 887 new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall); 888 switch (store->InputAt(1)->GetType()) { 889 case Primitive::kPrimBoolean: 890 case Primitive::kPrimByte: 891 case Primitive::kPrimChar: 892 case Primitive::kPrimShort: 893 case Primitive::kPrimInt: 894 case Primitive::kPrimNot: 895 case Primitive::kPrimFloat: 896 locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal()))); 897 break; 898 899 case Primitive::kPrimLong: 900 case Primitive::kPrimDouble: 901 locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal()))); 902 break; 903 904 default: 905 LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType(); 906 } 907} 908 909void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) { 910} 911 912void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) { 913 LocationSummary* locations = 914 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall); 915 locations->SetOut(Location::ConstantLocation(constant)); 916} 917 918void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) { 919 // Will be generated at use site. 920} 921 922void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) { 923 LocationSummary* locations = 924 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall); 925 locations->SetOut(Location::ConstantLocation(constant)); 926} 927 928void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) { 929 // Will be generated at use site. 930} 931 932void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) { 933 LocationSummary* locations = 934 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall); 935 locations->SetOut(Location::ConstantLocation(constant)); 936} 937 938void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) { 939 // Will be generated at use site. 940} 941 942void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) { 943 LocationSummary* locations = 944 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall); 945 locations->SetOut(Location::ConstantLocation(constant)); 946} 947 948void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) { 949 // Will be generated at use site. 950} 951 952void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) { 953 ret->SetLocations(nullptr); 954} 955 956void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) { 957 codegen_->GenerateFrameExit(); 958} 959 960void LocationsBuilderARM::VisitReturn(HReturn* ret) { 961 LocationSummary* locations = 962 new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall); 963 locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType())); 964} 965 966void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) { 967 codegen_->GenerateFrameExit(); 968} 969 970void LocationsBuilderARM::VisitInvokeStatic(HInvokeStatic* invoke) { 971 HandleInvoke(invoke); 972} 973 974void InstructionCodeGeneratorARM::LoadCurrentMethod(Register reg) { 975 __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset); 976} 977 978void InstructionCodeGeneratorARM::VisitInvokeStatic(HInvokeStatic* invoke) { 979 Register temp = invoke->GetLocations()->GetTemp(0).As<Register>(); 980 uint32_t heap_reference_size = sizeof(mirror::HeapReference<mirror::Object>); 981 size_t index_in_cache = mirror::Array::DataOffset(heap_reference_size).Int32Value() + 982 invoke->GetIndexInDexCache() * kArmWordSize; 983 984 // TODO: Implement all kinds of calls: 985 // 1) boot -> boot 986 // 2) app -> boot 987 // 3) app -> app 988 // 989 // Currently we implement the app -> app logic, which looks up in the resolve cache. 990 991 // temp = method; 992 LoadCurrentMethod(temp); 993 // temp = temp->dex_cache_resolved_methods_; 994 __ LoadFromOffset(kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()); 995 // temp = temp[index_in_cache] 996 __ LoadFromOffset(kLoadWord, temp, temp, index_in_cache); 997 // LR = temp[offset_of_quick_compiled_code] 998 __ LoadFromOffset(kLoadWord, LR, temp, 999 mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value()); 1000 // LR() 1001 __ blx(LR); 1002 1003 codegen_->RecordPcInfo(invoke, invoke->GetDexPc()); 1004 DCHECK(!codegen_->IsLeafMethod()); 1005} 1006 1007void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) { 1008 HandleInvoke(invoke); 1009} 1010 1011void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) { 1012 LocationSummary* locations = 1013 new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall); 1014 locations->AddTemp(Location::RegisterLocation(R0)); 1015 1016 InvokeDexCallingConventionVisitor calling_convention_visitor; 1017 for (size_t i = 0; i < invoke->InputCount(); i++) { 1018 HInstruction* input = invoke->InputAt(i); 1019 locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType())); 1020 } 1021 1022 locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType())); 1023} 1024 1025 1026void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) { 1027 Register temp = invoke->GetLocations()->GetTemp(0).As<Register>(); 1028 uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() + 1029 invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry); 1030 LocationSummary* locations = invoke->GetLocations(); 1031 Location receiver = locations->InAt(0); 1032 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value(); 1033 // temp = object->GetClass(); 1034 if (receiver.IsStackSlot()) { 1035 __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex()); 1036 __ LoadFromOffset(kLoadWord, temp, temp, class_offset); 1037 } else { 1038 __ LoadFromOffset(kLoadWord, temp, receiver.As<Register>(), class_offset); 1039 } 1040 // temp = temp->GetMethodAt(method_offset); 1041 uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value(); 1042 __ LoadFromOffset(kLoadWord, temp, temp, method_offset); 1043 // LR = temp->GetEntryPoint(); 1044 __ LoadFromOffset(kLoadWord, LR, temp, entry_point); 1045 // LR(); 1046 __ blx(LR); 1047 DCHECK(!codegen_->IsLeafMethod()); 1048 codegen_->RecordPcInfo(invoke, invoke->GetDexPc()); 1049} 1050 1051void LocationsBuilderARM::VisitNeg(HNeg* neg) { 1052 LocationSummary* locations = 1053 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall); 1054 switch (neg->GetResultType()) { 1055 case Primitive::kPrimInt: 1056 case Primitive::kPrimLong: { 1057 bool output_overlaps = (neg->GetResultType() == Primitive::kPrimLong); 1058 locations->SetInAt(0, Location::RequiresRegister()); 1059 locations->SetOut(Location::RequiresRegister(), output_overlaps); 1060 break; 1061 } 1062 1063 case Primitive::kPrimFloat: 1064 case Primitive::kPrimDouble: 1065 LOG(FATAL) << "Not yet implemented neg type " << neg->GetResultType(); 1066 break; 1067 1068 default: 1069 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType(); 1070 } 1071} 1072 1073void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) { 1074 LocationSummary* locations = neg->GetLocations(); 1075 Location out = locations->Out(); 1076 Location in = locations->InAt(0); 1077 switch (neg->GetResultType()) { 1078 case Primitive::kPrimInt: 1079 DCHECK(in.IsRegister()); 1080 __ rsb(out.As<Register>(), in.As<Register>(), ShifterOperand(0)); 1081 break; 1082 1083 case Primitive::kPrimLong: 1084 DCHECK(in.IsRegisterPair()); 1085 // out.lo = 0 - in.lo (and update the carry/borrow (C) flag) 1086 __ rsbs(out.AsRegisterPairLow<Register>(), 1087 in.AsRegisterPairLow<Register>(), 1088 ShifterOperand(0)); 1089 // We cannot emit an RSC (Reverse Subtract with Carry) 1090 // instruction here, as it does not exist in the Thumb-2 1091 // instruction set. We use the following approach 1092 // using SBC and SUB instead. 1093 // 1094 // out.hi = -C 1095 __ sbc(out.AsRegisterPairHigh<Register>(), 1096 out.AsRegisterPairHigh<Register>(), 1097 ShifterOperand(out.AsRegisterPairHigh<Register>())); 1098 // out.hi = out.hi - in.hi 1099 __ sub(out.AsRegisterPairHigh<Register>(), 1100 out.AsRegisterPairHigh<Register>(), 1101 ShifterOperand(in.AsRegisterPairHigh<Register>())); 1102 break; 1103 1104 case Primitive::kPrimFloat: 1105 case Primitive::kPrimDouble: 1106 LOG(FATAL) << "Not yet implemented neg type " << neg->GetResultType(); 1107 break; 1108 1109 default: 1110 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType(); 1111 } 1112} 1113 1114void LocationsBuilderARM::VisitAdd(HAdd* add) { 1115 LocationSummary* locations = 1116 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall); 1117 switch (add->GetResultType()) { 1118 case Primitive::kPrimInt: 1119 case Primitive::kPrimLong: { 1120 bool output_overlaps = (add->GetResultType() == Primitive::kPrimLong); 1121 locations->SetInAt(0, Location::RequiresRegister()); 1122 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1))); 1123 locations->SetOut(Location::RequiresRegister(), output_overlaps); 1124 break; 1125 } 1126 1127 case Primitive::kPrimFloat: 1128 case Primitive::kPrimDouble: { 1129 locations->SetInAt(0, Location::RequiresFpuRegister()); 1130 locations->SetInAt(1, Location::RequiresFpuRegister()); 1131 locations->SetOut(Location::RequiresFpuRegister()); 1132 break; 1133 } 1134 1135 default: 1136 LOG(FATAL) << "Unexpected add type " << add->GetResultType(); 1137 } 1138} 1139 1140void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) { 1141 LocationSummary* locations = add->GetLocations(); 1142 Location out = locations->Out(); 1143 Location first = locations->InAt(0); 1144 Location second = locations->InAt(1); 1145 switch (add->GetResultType()) { 1146 case Primitive::kPrimInt: 1147 if (second.IsRegister()) { 1148 __ add(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>())); 1149 } else { 1150 __ AddConstant(out.As<Register>(), 1151 first.As<Register>(), 1152 second.GetConstant()->AsIntConstant()->GetValue()); 1153 } 1154 break; 1155 1156 case Primitive::kPrimLong: 1157 __ adds(out.AsRegisterPairLow<Register>(), 1158 first.AsRegisterPairLow<Register>(), 1159 ShifterOperand(second.AsRegisterPairLow<Register>())); 1160 __ adc(out.AsRegisterPairHigh<Register>(), 1161 first.AsRegisterPairHigh<Register>(), 1162 ShifterOperand(second.AsRegisterPairHigh<Register>())); 1163 break; 1164 1165 case Primitive::kPrimFloat: 1166 __ vadds(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>()); 1167 break; 1168 1169 case Primitive::kPrimDouble: 1170 __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), 1171 FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()), 1172 FromLowSToD(second.AsFpuRegisterPairLow<SRegister>())); 1173 break; 1174 1175 default: 1176 LOG(FATAL) << "Unexpected add type " << add->GetResultType(); 1177 } 1178} 1179 1180void LocationsBuilderARM::VisitSub(HSub* sub) { 1181 LocationSummary* locations = 1182 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall); 1183 switch (sub->GetResultType()) { 1184 case Primitive::kPrimInt: 1185 case Primitive::kPrimLong: { 1186 bool output_overlaps = (sub->GetResultType() == Primitive::kPrimLong); 1187 locations->SetInAt(0, Location::RequiresRegister()); 1188 locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1))); 1189 locations->SetOut(Location::RequiresRegister(), output_overlaps); 1190 break; 1191 } 1192 case Primitive::kPrimFloat: 1193 case Primitive::kPrimDouble: { 1194 locations->SetInAt(0, Location::RequiresFpuRegister()); 1195 locations->SetInAt(1, Location::RequiresFpuRegister()); 1196 locations->SetOut(Location::RequiresFpuRegister()); 1197 break; 1198 } 1199 default: 1200 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType(); 1201 } 1202} 1203 1204void InstructionCodeGeneratorARM::VisitSub(HSub* sub) { 1205 LocationSummary* locations = sub->GetLocations(); 1206 Location out = locations->Out(); 1207 Location first = locations->InAt(0); 1208 Location second = locations->InAt(1); 1209 switch (sub->GetResultType()) { 1210 case Primitive::kPrimInt: { 1211 if (second.IsRegister()) { 1212 __ sub(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>())); 1213 } else { 1214 __ AddConstant(out.As<Register>(), 1215 first.As<Register>(), 1216 -second.GetConstant()->AsIntConstant()->GetValue()); 1217 } 1218 break; 1219 } 1220 1221 case Primitive::kPrimLong: { 1222 __ subs(out.AsRegisterPairLow<Register>(), 1223 first.AsRegisterPairLow<Register>(), 1224 ShifterOperand(second.AsRegisterPairLow<Register>())); 1225 __ sbc(out.AsRegisterPairHigh<Register>(), 1226 first.AsRegisterPairHigh<Register>(), 1227 ShifterOperand(second.AsRegisterPairHigh<Register>())); 1228 break; 1229 } 1230 1231 case Primitive::kPrimFloat: { 1232 __ vsubs(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>()); 1233 break; 1234 } 1235 1236 case Primitive::kPrimDouble: { 1237 __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), 1238 FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()), 1239 FromLowSToD(second.AsFpuRegisterPairLow<SRegister>())); 1240 break; 1241 } 1242 1243 1244 default: 1245 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType(); 1246 } 1247} 1248 1249void LocationsBuilderARM::VisitMul(HMul* mul) { 1250 LocationSummary* locations = 1251 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall); 1252 switch (mul->GetResultType()) { 1253 case Primitive::kPrimInt: 1254 case Primitive::kPrimLong: { 1255 locations->SetInAt(0, Location::RequiresRegister()); 1256 locations->SetInAt(1, Location::RequiresRegister()); 1257 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1258 break; 1259 } 1260 1261 case Primitive::kPrimFloat: 1262 case Primitive::kPrimDouble: { 1263 locations->SetInAt(0, Location::RequiresFpuRegister()); 1264 locations->SetInAt(1, Location::RequiresFpuRegister()); 1265 locations->SetOut(Location::RequiresFpuRegister()); 1266 break; 1267 } 1268 1269 default: 1270 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType(); 1271 } 1272} 1273 1274void InstructionCodeGeneratorARM::VisitMul(HMul* mul) { 1275 LocationSummary* locations = mul->GetLocations(); 1276 Location out = locations->Out(); 1277 Location first = locations->InAt(0); 1278 Location second = locations->InAt(1); 1279 switch (mul->GetResultType()) { 1280 case Primitive::kPrimInt: { 1281 __ mul(out.As<Register>(), first.As<Register>(), second.As<Register>()); 1282 break; 1283 } 1284 case Primitive::kPrimLong: { 1285 Register out_hi = out.AsRegisterPairHigh<Register>(); 1286 Register out_lo = out.AsRegisterPairLow<Register>(); 1287 Register in1_hi = first.AsRegisterPairHigh<Register>(); 1288 Register in1_lo = first.AsRegisterPairLow<Register>(); 1289 Register in2_hi = second.AsRegisterPairHigh<Register>(); 1290 Register in2_lo = second.AsRegisterPairLow<Register>(); 1291 1292 // Extra checks to protect caused by the existence of R1_R2. 1293 // The algorithm is wrong if out.hi is either in1.lo or in2.lo: 1294 // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2); 1295 DCHECK_NE(out_hi, in1_lo); 1296 DCHECK_NE(out_hi, in2_lo); 1297 1298 // input: in1 - 64 bits, in2 - 64 bits 1299 // output: out 1300 // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo 1301 // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32] 1302 // parts: out.lo = (in1.lo * in2.lo)[31:0] 1303 1304 // IP <- in1.lo * in2.hi 1305 __ mul(IP, in1_lo, in2_hi); 1306 // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo 1307 __ mla(out_hi, in1_hi, in2_lo, IP); 1308 // out.lo <- (in1.lo * in2.lo)[31:0]; 1309 __ umull(out_lo, IP, in1_lo, in2_lo); 1310 // out.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32] 1311 __ add(out_hi, out_hi, ShifterOperand(IP)); 1312 break; 1313 } 1314 1315 case Primitive::kPrimFloat: { 1316 __ vmuls(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>()); 1317 break; 1318 } 1319 1320 case Primitive::kPrimDouble: { 1321 __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), 1322 FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()), 1323 FromLowSToD(second.AsFpuRegisterPairLow<SRegister>())); 1324 break; 1325 } 1326 1327 default: 1328 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType(); 1329 } 1330} 1331 1332void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) { 1333 LocationSummary* locations = 1334 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall); 1335 InvokeRuntimeCallingConvention calling_convention; 1336 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0))); 1337 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1))); 1338 locations->SetOut(Location::RegisterLocation(R0)); 1339} 1340 1341void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) { 1342 InvokeRuntimeCallingConvention calling_convention; 1343 LoadCurrentMethod(calling_convention.GetRegisterAt(1)); 1344 __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex()); 1345 1346 int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocObjectWithAccessCheck).Int32Value(); 1347 __ LoadFromOffset(kLoadWord, LR, TR, offset); 1348 __ blx(LR); 1349 1350 codegen_->RecordPcInfo(instruction, instruction->GetDexPc()); 1351 DCHECK(!codegen_->IsLeafMethod()); 1352} 1353 1354void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) { 1355 LocationSummary* locations = 1356 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall); 1357 InvokeRuntimeCallingConvention calling_convention; 1358 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0))); 1359 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1))); 1360 locations->SetOut(Location::RegisterLocation(R0)); 1361 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(2))); 1362} 1363 1364void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) { 1365 InvokeRuntimeCallingConvention calling_convention; 1366 LoadCurrentMethod(calling_convention.GetRegisterAt(1)); 1367 __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex()); 1368 1369 int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocArrayWithAccessCheck).Int32Value(); 1370 __ LoadFromOffset(kLoadWord, LR, TR, offset); 1371 __ blx(LR); 1372 1373 codegen_->RecordPcInfo(instruction, instruction->GetDexPc()); 1374 DCHECK(!codegen_->IsLeafMethod()); 1375} 1376 1377void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) { 1378 LocationSummary* locations = 1379 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1380 Location location = parameter_visitor_.GetNextLocation(instruction->GetType()); 1381 if (location.IsStackSlot()) { 1382 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize()); 1383 } else if (location.IsDoubleStackSlot()) { 1384 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize()); 1385 } 1386 locations->SetOut(location); 1387} 1388 1389void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) { 1390 // Nothing to do, the parameter is already at its location. 1391} 1392 1393void LocationsBuilderARM::VisitNot(HNot* not_) { 1394 LocationSummary* locations = 1395 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall); 1396 locations->SetInAt(0, Location::RequiresRegister()); 1397 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1398} 1399 1400void InstructionCodeGeneratorARM::VisitNot(HNot* not_) { 1401 LocationSummary* locations = not_->GetLocations(); 1402 Location out = locations->Out(); 1403 Location in = locations->InAt(0); 1404 switch (not_->InputAt(0)->GetType()) { 1405 case Primitive::kPrimBoolean: 1406 __ eor(out.As<Register>(), in.As<Register>(), ShifterOperand(1)); 1407 break; 1408 1409 case Primitive::kPrimInt: 1410 __ mvn(out.As<Register>(), ShifterOperand(in.As<Register>())); 1411 break; 1412 1413 case Primitive::kPrimLong: 1414 LOG(FATAL) << "Not yet implemented type for not operation " << not_->GetResultType(); 1415 break; 1416 1417 default: 1418 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType(); 1419 } 1420} 1421 1422void LocationsBuilderARM::VisitCompare(HCompare* compare) { 1423 LocationSummary* locations = 1424 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall); 1425 locations->SetInAt(0, Location::RequiresRegister()); 1426 locations->SetInAt(1, Location::RequiresRegister()); 1427 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1428} 1429 1430void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) { 1431 Label greater, done; 1432 LocationSummary* locations = compare->GetLocations(); 1433 switch (compare->InputAt(0)->GetType()) { 1434 case Primitive::kPrimLong: { 1435 Register output = locations->Out().As<Register>(); 1436 Location left = locations->InAt(0); 1437 Location right = locations->InAt(1); 1438 Label less, greater, done; 1439 __ cmp(left.AsRegisterPairHigh<Register>(), 1440 ShifterOperand(right.AsRegisterPairHigh<Register>())); // Signed compare. 1441 __ b(&less, LT); 1442 __ b(&greater, GT); 1443 // Do LoadImmediate before any `cmp`, as LoadImmediate might affect 1444 // the status flags. 1445 __ LoadImmediate(output, 0); 1446 __ cmp(left.AsRegisterPairLow<Register>(), 1447 ShifterOperand(right.AsRegisterPairLow<Register>())); // Unsigned compare. 1448 __ b(&done, EQ); 1449 __ b(&less, CC); 1450 1451 __ Bind(&greater); 1452 __ LoadImmediate(output, 1); 1453 __ b(&done); 1454 1455 __ Bind(&less); 1456 __ LoadImmediate(output, -1); 1457 1458 __ Bind(&done); 1459 break; 1460 } 1461 default: 1462 LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType(); 1463 } 1464} 1465 1466void LocationsBuilderARM::VisitPhi(HPhi* instruction) { 1467 LocationSummary* locations = 1468 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1469 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) { 1470 locations->SetInAt(i, Location::Any()); 1471 } 1472 locations->SetOut(Location::Any()); 1473} 1474 1475void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) { 1476 LOG(FATAL) << "Unreachable"; 1477} 1478 1479void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) { 1480 LocationSummary* locations = 1481 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1482 bool is_object_type = instruction->GetFieldType() == Primitive::kPrimNot; 1483 locations->SetInAt(0, Location::RequiresRegister()); 1484 locations->SetInAt(1, Location::RequiresRegister()); 1485 // Temporary registers for the write barrier. 1486 if (is_object_type) { 1487 locations->AddTemp(Location::RequiresRegister()); 1488 locations->AddTemp(Location::RequiresRegister()); 1489 } 1490} 1491 1492void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) { 1493 LocationSummary* locations = instruction->GetLocations(); 1494 Register obj = locations->InAt(0).As<Register>(); 1495 uint32_t offset = instruction->GetFieldOffset().Uint32Value(); 1496 Primitive::Type field_type = instruction->GetFieldType(); 1497 1498 switch (field_type) { 1499 case Primitive::kPrimBoolean: 1500 case Primitive::kPrimByte: { 1501 Register value = locations->InAt(1).As<Register>(); 1502 __ StoreToOffset(kStoreByte, value, obj, offset); 1503 break; 1504 } 1505 1506 case Primitive::kPrimShort: 1507 case Primitive::kPrimChar: { 1508 Register value = locations->InAt(1).As<Register>(); 1509 __ StoreToOffset(kStoreHalfword, value, obj, offset); 1510 break; 1511 } 1512 1513 case Primitive::kPrimInt: 1514 case Primitive::kPrimNot: { 1515 Register value = locations->InAt(1).As<Register>(); 1516 __ StoreToOffset(kStoreWord, value, obj, offset); 1517 if (field_type == Primitive::kPrimNot) { 1518 Register temp = locations->GetTemp(0).As<Register>(); 1519 Register card = locations->GetTemp(1).As<Register>(); 1520 codegen_->MarkGCCard(temp, card, obj, value); 1521 } 1522 break; 1523 } 1524 1525 case Primitive::kPrimLong: { 1526 Location value = locations->InAt(1); 1527 __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset); 1528 break; 1529 } 1530 1531 case Primitive::kPrimFloat: 1532 case Primitive::kPrimDouble: 1533 LOG(FATAL) << "Unimplemented register type " << field_type; 1534 UNREACHABLE(); 1535 case Primitive::kPrimVoid: 1536 LOG(FATAL) << "Unreachable type " << field_type; 1537 UNREACHABLE(); 1538 } 1539} 1540 1541void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) { 1542 LocationSummary* locations = 1543 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1544 locations->SetInAt(0, Location::RequiresRegister()); 1545 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1546} 1547 1548void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) { 1549 LocationSummary* locations = instruction->GetLocations(); 1550 Register obj = locations->InAt(0).As<Register>(); 1551 uint32_t offset = instruction->GetFieldOffset().Uint32Value(); 1552 1553 switch (instruction->GetType()) { 1554 case Primitive::kPrimBoolean: { 1555 Register out = locations->Out().As<Register>(); 1556 __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset); 1557 break; 1558 } 1559 1560 case Primitive::kPrimByte: { 1561 Register out = locations->Out().As<Register>(); 1562 __ LoadFromOffset(kLoadSignedByte, out, obj, offset); 1563 break; 1564 } 1565 1566 case Primitive::kPrimShort: { 1567 Register out = locations->Out().As<Register>(); 1568 __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset); 1569 break; 1570 } 1571 1572 case Primitive::kPrimChar: { 1573 Register out = locations->Out().As<Register>(); 1574 __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset); 1575 break; 1576 } 1577 1578 case Primitive::kPrimInt: 1579 case Primitive::kPrimNot: { 1580 Register out = locations->Out().As<Register>(); 1581 __ LoadFromOffset(kLoadWord, out, obj, offset); 1582 break; 1583 } 1584 1585 case Primitive::kPrimLong: { 1586 // TODO: support volatile. 1587 Location out = locations->Out(); 1588 __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset); 1589 break; 1590 } 1591 1592 case Primitive::kPrimFloat: 1593 case Primitive::kPrimDouble: 1594 LOG(FATAL) << "Unimplemented register type " << instruction->GetType(); 1595 UNREACHABLE(); 1596 case Primitive::kPrimVoid: 1597 LOG(FATAL) << "Unreachable type " << instruction->GetType(); 1598 UNREACHABLE(); 1599 } 1600} 1601 1602void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) { 1603 LocationSummary* locations = 1604 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1605 locations->SetInAt(0, Location::RequiresRegister()); 1606 if (instruction->HasUses()) { 1607 locations->SetOut(Location::SameAsFirstInput()); 1608 } 1609} 1610 1611void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) { 1612 SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction); 1613 codegen_->AddSlowPath(slow_path); 1614 1615 LocationSummary* locations = instruction->GetLocations(); 1616 Location obj = locations->InAt(0); 1617 1618 if (obj.IsRegister()) { 1619 __ cmp(obj.As<Register>(), ShifterOperand(0)); 1620 __ b(slow_path->GetEntryLabel(), EQ); 1621 } else { 1622 DCHECK(obj.IsConstant()) << obj; 1623 DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0); 1624 __ b(slow_path->GetEntryLabel()); 1625 } 1626} 1627 1628void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) { 1629 LocationSummary* locations = 1630 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1631 locations->SetInAt(0, Location::RequiresRegister()); 1632 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1))); 1633 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1634} 1635 1636void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) { 1637 LocationSummary* locations = instruction->GetLocations(); 1638 Register obj = locations->InAt(0).As<Register>(); 1639 Location index = locations->InAt(1); 1640 1641 switch (instruction->GetType()) { 1642 case Primitive::kPrimBoolean: { 1643 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value(); 1644 Register out = locations->Out().As<Register>(); 1645 if (index.IsConstant()) { 1646 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset; 1647 __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset); 1648 } else { 1649 __ add(IP, obj, ShifterOperand(index.As<Register>())); 1650 __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset); 1651 } 1652 break; 1653 } 1654 1655 case Primitive::kPrimByte: { 1656 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value(); 1657 Register out = locations->Out().As<Register>(); 1658 if (index.IsConstant()) { 1659 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset; 1660 __ LoadFromOffset(kLoadSignedByte, out, obj, offset); 1661 } else { 1662 __ add(IP, obj, ShifterOperand(index.As<Register>())); 1663 __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset); 1664 } 1665 break; 1666 } 1667 1668 case Primitive::kPrimShort: { 1669 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value(); 1670 Register out = locations->Out().As<Register>(); 1671 if (index.IsConstant()) { 1672 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset; 1673 __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset); 1674 } else { 1675 __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2)); 1676 __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset); 1677 } 1678 break; 1679 } 1680 1681 case Primitive::kPrimChar: { 1682 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value(); 1683 Register out = locations->Out().As<Register>(); 1684 if (index.IsConstant()) { 1685 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset; 1686 __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset); 1687 } else { 1688 __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2)); 1689 __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset); 1690 } 1691 break; 1692 } 1693 1694 case Primitive::kPrimInt: 1695 case Primitive::kPrimNot: { 1696 DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t)); 1697 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value(); 1698 Register out = locations->Out().As<Register>(); 1699 if (index.IsConstant()) { 1700 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset; 1701 __ LoadFromOffset(kLoadWord, out, obj, offset); 1702 } else { 1703 __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4)); 1704 __ LoadFromOffset(kLoadWord, out, IP, data_offset); 1705 } 1706 break; 1707 } 1708 1709 case Primitive::kPrimLong: { 1710 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value(); 1711 Location out = locations->Out(); 1712 if (index.IsConstant()) { 1713 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset; 1714 __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset); 1715 } else { 1716 __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8)); 1717 __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset); 1718 } 1719 break; 1720 } 1721 1722 case Primitive::kPrimFloat: 1723 case Primitive::kPrimDouble: 1724 LOG(FATAL) << "Unimplemented register type " << instruction->GetType(); 1725 UNREACHABLE(); 1726 case Primitive::kPrimVoid: 1727 LOG(FATAL) << "Unreachable type " << instruction->GetType(); 1728 UNREACHABLE(); 1729 } 1730} 1731 1732void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) { 1733 Primitive::Type value_type = instruction->GetComponentType(); 1734 bool is_object = value_type == Primitive::kPrimNot; 1735 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary( 1736 instruction, is_object ? LocationSummary::kCall : LocationSummary::kNoCall); 1737 if (is_object) { 1738 InvokeRuntimeCallingConvention calling_convention; 1739 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0))); 1740 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1))); 1741 locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2))); 1742 } else { 1743 locations->SetInAt(0, Location::RequiresRegister()); 1744 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1))); 1745 locations->SetInAt(2, Location::RequiresRegister()); 1746 } 1747} 1748 1749void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) { 1750 LocationSummary* locations = instruction->GetLocations(); 1751 Register obj = locations->InAt(0).As<Register>(); 1752 Location index = locations->InAt(1); 1753 Primitive::Type value_type = instruction->GetComponentType(); 1754 1755 switch (value_type) { 1756 case Primitive::kPrimBoolean: 1757 case Primitive::kPrimByte: { 1758 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value(); 1759 Register value = locations->InAt(2).As<Register>(); 1760 if (index.IsConstant()) { 1761 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset; 1762 __ StoreToOffset(kStoreByte, value, obj, offset); 1763 } else { 1764 __ add(IP, obj, ShifterOperand(index.As<Register>())); 1765 __ StoreToOffset(kStoreByte, value, IP, data_offset); 1766 } 1767 break; 1768 } 1769 1770 case Primitive::kPrimShort: 1771 case Primitive::kPrimChar: { 1772 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value(); 1773 Register value = locations->InAt(2).As<Register>(); 1774 if (index.IsConstant()) { 1775 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset; 1776 __ StoreToOffset(kStoreHalfword, value, obj, offset); 1777 } else { 1778 __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2)); 1779 __ StoreToOffset(kStoreHalfword, value, IP, data_offset); 1780 } 1781 break; 1782 } 1783 1784 case Primitive::kPrimInt: { 1785 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value(); 1786 Register value = locations->InAt(2).As<Register>(); 1787 if (index.IsConstant()) { 1788 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset; 1789 __ StoreToOffset(kStoreWord, value, obj, offset); 1790 } else { 1791 __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4)); 1792 __ StoreToOffset(kStoreWord, value, IP, data_offset); 1793 } 1794 break; 1795 } 1796 1797 case Primitive::kPrimNot: { 1798 int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAputObject).Int32Value(); 1799 __ LoadFromOffset(kLoadWord, LR, TR, offset); 1800 __ blx(LR); 1801 codegen_->RecordPcInfo(instruction, instruction->GetDexPc()); 1802 DCHECK(!codegen_->IsLeafMethod()); 1803 break; 1804 } 1805 1806 case Primitive::kPrimLong: { 1807 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value(); 1808 Location value = locations->InAt(2); 1809 if (index.IsConstant()) { 1810 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset; 1811 __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset); 1812 } else { 1813 __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8)); 1814 __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset); 1815 } 1816 break; 1817 } 1818 1819 case Primitive::kPrimFloat: 1820 case Primitive::kPrimDouble: 1821 LOG(FATAL) << "Unimplemented register type " << instruction->GetType(); 1822 UNREACHABLE(); 1823 case Primitive::kPrimVoid: 1824 LOG(FATAL) << "Unreachable type " << instruction->GetType(); 1825 UNREACHABLE(); 1826 } 1827} 1828 1829void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) { 1830 LocationSummary* locations = 1831 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1832 locations->SetInAt(0, Location::RequiresRegister()); 1833 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1834} 1835 1836void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) { 1837 LocationSummary* locations = instruction->GetLocations(); 1838 uint32_t offset = mirror::Array::LengthOffset().Uint32Value(); 1839 Register obj = locations->InAt(0).As<Register>(); 1840 Register out = locations->Out().As<Register>(); 1841 __ LoadFromOffset(kLoadWord, out, obj, offset); 1842} 1843 1844void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) { 1845 LocationSummary* locations = 1846 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1847 locations->SetInAt(0, Location::RequiresRegister()); 1848 locations->SetInAt(1, Location::RequiresRegister()); 1849 if (instruction->HasUses()) { 1850 locations->SetOut(Location::SameAsFirstInput()); 1851 } 1852} 1853 1854void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) { 1855 LocationSummary* locations = instruction->GetLocations(); 1856 SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM( 1857 instruction, locations->InAt(0), locations->InAt(1)); 1858 codegen_->AddSlowPath(slow_path); 1859 1860 Register index = locations->InAt(0).As<Register>(); 1861 Register length = locations->InAt(1).As<Register>(); 1862 1863 __ cmp(index, ShifterOperand(length)); 1864 __ b(slow_path->GetEntryLabel(), CS); 1865} 1866 1867void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) { 1868 Label is_null; 1869 __ CompareAndBranchIfZero(value, &is_null); 1870 __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value()); 1871 __ Lsr(temp, object, gc::accounting::CardTable::kCardShift); 1872 __ strb(card, Address(card, temp)); 1873 __ Bind(&is_null); 1874} 1875 1876void LocationsBuilderARM::VisitTemporary(HTemporary* temp) { 1877 temp->SetLocations(nullptr); 1878} 1879 1880void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) { 1881 // Nothing to do, this is driven by the code generator. 1882} 1883 1884void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) { 1885 LOG(FATAL) << "Unreachable"; 1886} 1887 1888void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) { 1889 codegen_->GetMoveResolver()->EmitNativeCode(instruction); 1890} 1891 1892void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) { 1893 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath); 1894} 1895 1896void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) { 1897 HBasicBlock* block = instruction->GetBlock(); 1898 if (block->GetLoopInformation() != nullptr) { 1899 DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction); 1900 // The back edge will generate the suspend check. 1901 return; 1902 } 1903 if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) { 1904 // The goto will generate the suspend check. 1905 return; 1906 } 1907 GenerateSuspendCheck(instruction, nullptr); 1908} 1909 1910void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction, 1911 HBasicBlock* successor) { 1912 SuspendCheckSlowPathARM* slow_path = 1913 new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor); 1914 codegen_->AddSlowPath(slow_path); 1915 1916 __ subs(R4, R4, ShifterOperand(1)); 1917 if (successor == nullptr) { 1918 __ b(slow_path->GetEntryLabel(), EQ); 1919 __ Bind(slow_path->GetReturnLabel()); 1920 } else { 1921 __ b(codegen_->GetLabelOf(successor), NE); 1922 __ b(slow_path->GetEntryLabel()); 1923 } 1924} 1925 1926ArmAssembler* ParallelMoveResolverARM::GetAssembler() const { 1927 return codegen_->GetAssembler(); 1928} 1929 1930void ParallelMoveResolverARM::EmitMove(size_t index) { 1931 MoveOperands* move = moves_.Get(index); 1932 Location source = move->GetSource(); 1933 Location destination = move->GetDestination(); 1934 1935 if (source.IsRegister()) { 1936 if (destination.IsRegister()) { 1937 __ Mov(destination.As<Register>(), source.As<Register>()); 1938 } else { 1939 DCHECK(destination.IsStackSlot()); 1940 __ StoreToOffset(kStoreWord, source.As<Register>(), 1941 SP, destination.GetStackIndex()); 1942 } 1943 } else if (source.IsStackSlot()) { 1944 if (destination.IsRegister()) { 1945 __ LoadFromOffset(kLoadWord, destination.As<Register>(), 1946 SP, source.GetStackIndex()); 1947 } else { 1948 DCHECK(destination.IsStackSlot()); 1949 __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex()); 1950 __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex()); 1951 } 1952 } else { 1953 DCHECK(source.IsConstant()); 1954 DCHECK(source.GetConstant()->IsIntConstant()); 1955 int32_t value = source.GetConstant()->AsIntConstant()->GetValue(); 1956 if (destination.IsRegister()) { 1957 __ LoadImmediate(destination.As<Register>(), value); 1958 } else { 1959 DCHECK(destination.IsStackSlot()); 1960 __ LoadImmediate(IP, value); 1961 __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex()); 1962 } 1963 } 1964} 1965 1966void ParallelMoveResolverARM::Exchange(Register reg, int mem) { 1967 __ Mov(IP, reg); 1968 __ LoadFromOffset(kLoadWord, reg, SP, mem); 1969 __ StoreToOffset(kStoreWord, IP, SP, mem); 1970} 1971 1972void ParallelMoveResolverARM::Exchange(int mem1, int mem2) { 1973 ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters()); 1974 int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0; 1975 __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()), 1976 SP, mem1 + stack_offset); 1977 __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset); 1978 __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()), 1979 SP, mem2 + stack_offset); 1980 __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset); 1981} 1982 1983void ParallelMoveResolverARM::EmitSwap(size_t index) { 1984 MoveOperands* move = moves_.Get(index); 1985 Location source = move->GetSource(); 1986 Location destination = move->GetDestination(); 1987 1988 if (source.IsRegister() && destination.IsRegister()) { 1989 DCHECK_NE(source.As<Register>(), IP); 1990 DCHECK_NE(destination.As<Register>(), IP); 1991 __ Mov(IP, source.As<Register>()); 1992 __ Mov(source.As<Register>(), destination.As<Register>()); 1993 __ Mov(destination.As<Register>(), IP); 1994 } else if (source.IsRegister() && destination.IsStackSlot()) { 1995 Exchange(source.As<Register>(), destination.GetStackIndex()); 1996 } else if (source.IsStackSlot() && destination.IsRegister()) { 1997 Exchange(destination.As<Register>(), source.GetStackIndex()); 1998 } else if (source.IsStackSlot() && destination.IsStackSlot()) { 1999 Exchange(source.GetStackIndex(), destination.GetStackIndex()); 2000 } else { 2001 LOG(FATAL) << "Unimplemented"; 2002 } 2003} 2004 2005void ParallelMoveResolverARM::SpillScratch(int reg) { 2006 __ Push(static_cast<Register>(reg)); 2007} 2008 2009void ParallelMoveResolverARM::RestoreScratch(int reg) { 2010 __ Pop(static_cast<Register>(reg)); 2011} 2012 2013} // namespace arm 2014} // namespace art 2015