code_generator_x86_64.cc revision 1a43dd78d054dbad8d7af9ba4829ea2f1cb70b53
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86_64.h"

#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "mirror/array.h"
#include "mirror/art_method.h"
#include "mirror/object_reference.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/x86_64/assembler_x86_64.h"
#include "utils/x86_64/managed_register_x86_64.h"

namespace art {

x86_64::X86_64ManagedRegister Location::AsX86_64() const {
  return reg().AsX86_64();
}

namespace x86_64 {

#define __ reinterpret_cast<X86_64Assembler*>(codegen->GetAssembler())->

class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(uint32_t dex_pc) : dex_pc_(dex_pc) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    __ gs()->call(
        Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pThrowNullPointer), true));
    codegen->RecordPcInfo(dex_pc_);
  }

 private:
  const uint32_t dex_pc_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};

#undef __
#define __ reinterpret_cast<X86_64Assembler*>(GetAssembler())->

inline Condition X86_64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return kEqual;
}

// Some x86_64 instructions require a register to be available as temp.
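// R11 is caller-save in the x86-64 ABI and is not an argument register, and
// SetupBlockedRegisters below keeps it away from the register allocator, so
// the generated code may clobber it freely between instructions.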
static constexpr Register TMP = R11;

static constexpr int kNumberOfPushedRegistersAtEntry = 1;
static constexpr int kCurrentMethodStackOffset = 0;

void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << X86_64ManagedRegister::FromCpuRegister(Register(reg));
}

void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << X86_64ManagedRegister::FromXmmRegister(FloatRegister(reg));
}

static Location X86_64CpuLocation(Register reg) {
  return Location::RegisterLocation(X86_64ManagedRegister::FromCpuRegister(reg));
}

CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph)
    : CodeGenerator(graph, kNumberOfRegIds),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this) {}

size_t CodeGeneratorX86_64::FrameEntrySpillSize() const {
  return kNumberOfPushedRegistersAtEntry * kX86_64WordSize;
}

InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
    : HGraphVisitor(graph),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

ManagedRegister CodeGeneratorX86_64::AllocateFreeRegister(Primitive::Type type,
                                                          bool* blocked_registers) const {
  switch (type) {
    case Primitive::kPrimLong:
    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      size_t reg = AllocateFreeRegisterInternal(blocked_registers, kNumberOfCpuRegisters);
      return X86_64ManagedRegister::FromCpuRegister(static_cast<Register>(reg));
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << type;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return ManagedRegister::NoRegister();
}

void CodeGeneratorX86_64::SetupBlockedRegisters(bool* blocked_registers) const {
  // Stack register is always reserved.
  blocked_registers[RSP] = true;

  // Block the register used as TMP.
  blocked_registers[TMP] = true;

  // TODO: We currently don't use Quick's callee saved registers.
  blocked_registers[RBX] = true;
  blocked_registers[RBP] = true;
  blocked_registers[R12] = true;
  blocked_registers[R13] = true;
  blocked_registers[R14] = true;
  blocked_registers[R15] = true;
}

void CodeGeneratorX86_64::GenerateFrameEntry() {
  // Create a fake register to mimic Quick.
  static const int kFakeReturnRegister = 16;
  core_spill_mask_ |= (1 << kFakeReturnRegister);

  // The return PC has already been pushed on the stack.
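  // The frame therefore grows by GetFrameSize() minus the word already taken
  // by the return PC, and the current method (passed in RDI) is stored at
  // [RSP + kCurrentMethodStackOffset], the lowest address of the frame.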
  __ subq(CpuRegister(RSP),
          Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86_64WordSize));
  __ movl(Address(CpuRegister(RSP), kCurrentMethodStackOffset), CpuRegister(RDI));
}

void CodeGeneratorX86_64::GenerateFrameExit() {
  __ addq(CpuRegister(RSP),
          Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86_64WordSize));
}

void CodeGeneratorX86_64::Bind(Label* label) {
  __ Bind(label);
}

void InstructionCodeGeneratorX86_64::LoadCurrentMethod(CpuRegister reg) {
  __ movl(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
}

Location CodeGeneratorX86_64::GetStackLocation(HLoadLocal* load) const {
  switch (load->GetType()) {
    case Primitive::kPrimLong:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented type " << load->GetType();

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << load->GetType();
  }

  LOG(FATAL) << "Unreachable";
  return Location();
}

void CodeGeneratorX86_64::Move(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());
    } else if (source.IsStackSlot()) {
      __ movl(destination.AsX86_64().AsCpuRegister(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(destination.AsX86_64().AsCpuRegister(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsStackSlot()) {
    if (source.IsRegister()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsX86_64().AsCpuRegister());
    } else {
      DCHECK(source.IsStackSlot());
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsX86_64().AsCpuRegister());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  }
}

void CodeGeneratorX86_64::Move(HInstruction* instruction,
                               Location location,
                               HInstruction* move_for) {
  if (instruction->AsIntConstant() != nullptr) {
    Immediate imm(instruction->AsIntConstant()->GetValue());
    if (location.IsRegister()) {
      __ movl(location.AsX86_64().AsCpuRegister(), imm);
    } else {
      __ movl(Address(CpuRegister(RSP), location.GetStackIndex()), imm);
    }
  } else if (instruction->AsLongConstant() != nullptr) {
    int64_t value = instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      __ movq(location.AsX86_64().AsCpuRegister(), Immediate(value));
    } else {
      __ movq(CpuRegister(TMP), Immediate(value));
      __ movq(Address(CpuRegister(RSP), location.GetStackIndex()), CpuRegister(TMP));
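      // x86-64 has no instruction that stores a 64-bit immediate directly to
      // memory (only the register form takes a full 64-bit immediate), so the
      // constant above is staged through TMP.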
    }
  } else if (instruction->AsLoadLocal() != nullptr) {
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        Move(location, Location::StackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      case Primitive::kPrimLong:
        Move(location,
             Location::DoubleStackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      default:
        LOG(FATAL) << "Unimplemented local type " << instruction->GetType();
    }
  } else {
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        Move(location, instruction->GetLocations()->Out());
        break;

      default:
        LOG(FATAL) << "Unimplemented type " << instruction->GetType();
    }
  }
}

void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  if (GetGraph()->GetExitBlock() == successor) {
    codegen_->GenerateFrameExit();
  } else if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit) {
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ int3();
  }
}

void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  HCondition* condition = cond->AsCondition();
  if (condition->NeedsMaterialization()) {
    locations->SetInAt(0, Location::Any());
  }
  if_instr->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  HCondition* condition = cond->AsCondition();
  if (condition->NeedsMaterialization()) {
    // Materialized condition, compare against 0.
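    // The condition has already been computed into a 0/1 value (see
    // VisitCondition below), so only a test against zero is emitted here.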
    Location lhs = if_instr->GetLocations()->InAt(0);
    if (lhs.IsRegister()) {
      __ cmpl(lhs.AsX86_64().AsCpuRegister(), Immediate(0));
    } else {
      __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
    }
    __ j(kEqual, codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
  } else {
    Location lhs = condition->GetLocations()->InAt(0);
    Location rhs = condition->GetLocations()->InAt(1);
    if (rhs.IsRegister()) {
      __ cmpl(lhs.AsX86_64().AsCpuRegister(), rhs.AsX86_64().AsCpuRegister());
    } else if (rhs.IsConstant()) {
      __ cmpl(lhs.AsX86_64().AsCpuRegister(),
              Immediate(rhs.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      __ cmpl(lhs.AsX86_64().AsCpuRegister(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
    }
    __ j(X86_64Condition(condition->GetCondition()),
         codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
  }
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
    __ jmp(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}

void LocationsBuilderX86_64::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderX86_64::VisitLoadLocal(HLoadLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
}

void LocationsBuilderX86_64::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << store->InputAt(1)->GetType();
  }
  store->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitStoreLocal(HStoreLocal* store) {
}

void LocationsBuilderX86_64::VisitCondition(HCondition* comp) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(comp);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::Any());
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister());
  }
  comp->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitCondition(HCondition* comp) {
  if (comp->NeedsMaterialization()) {
    LocationSummary* locations = comp->GetLocations();
    if (locations->InAt(1).IsRegister()) {
      __ cmpq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
    } else if (locations->InAt(1).IsConstant()) {
      __ cmpq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              Immediate(locations->InAt(1).GetConstant()->AsIntConstant()->GetValue()));
    } else {
      __ cmpq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              Address(CpuRegister(RSP), locations->InAt(1).GetStackIndex()));
    }
    __ setcc(X86_64Condition(comp->GetCondition()),
             comp->GetLocations()->Out().AsX86_64().AsCpuRegister());
  }
}
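
// setcc writes a single byte (0 or 1) into the low 8 bits of its output
// register. The visitors below cover each HCondition subclass; they all
// delegate to VisitCondition above.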

void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
  compare->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  Label greater, done;
  LocationSummary* locations = compare->GetLocations();
  switch (compare->InputAt(0)->GetType()) {
    case Primitive::kPrimLong:
      __ cmpq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    default:
      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
  }

  // mov does not affect the flags set by cmpq above, so the conditional jumps
  // below still observe the comparison result.
  __ movl(locations->Out().AsX86_64().AsCpuRegister(), Immediate(0));
  __ j(kEqual, &done);
  __ j(kGreater, &greater);

  __ movl(locations->Out().AsX86_64().AsCpuRegister(), Immediate(-1));
  __ jmp(&done);

  __ Bind(&greater);
  __ movl(locations->Out().AsX86_64().AsCpuRegister(), Immediate(1));

  __ Bind(&done);
}

void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
  constant->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant) {
}

void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
  constant->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant) {
}

void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret) {
  codegen_->GenerateFrameExit();
  __ ret();
}
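
// Non-void returns pin their input to RAX, the x86-64 return register, so the
// value is already in place when the epilogue runs; debug builds verify this
// with the DCHECK in the instruction visitor below.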

void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetInAt(0, X86_64CpuLocation(RAX));
      break;

    default:
      LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
  }
  ret->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86_64().AsCpuRegister().AsRegister(), RAX);
        break;

      default:
        LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
  __ ret();
}

static constexpr Register kRuntimeParameterCoreRegisters[] = { RDI, RSI, RDX };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);

class InvokeRuntimeCallingConvention : public CallingConvention<Register> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return X86_64CpuLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        gp_index_ += 1;
        return X86_64CpuLocation(calling_convention.GetRegisterAt(index));
      } else {
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented parameter type " << type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
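
// The current method is passed in RDI (it is what the prologue stores at
// [RSP + 0]), which is why VisitInvokeStatic below reserves RDI as its
// temporary: the callee lookup starts from the current method and clobbers
// the register.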

void LocationsBuilderX86_64::VisitInvokeStatic(HInvokeStatic* invoke) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(invoke);
  locations->AddTemp(X86_64CpuLocation(RDI));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); ++i) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  switch (invoke->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetOut(X86_64CpuLocation(RAX));
      break;

    case Primitive::kPrimVoid:
      break;

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented return type " << invoke->GetType();
      break;
  }

  invoke->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitInvokeStatic(HInvokeStatic* invoke) {
  CpuRegister temp = invoke->GetLocations()->GetTemp(0).AsX86_64().AsCpuRegister();
  uint32_t heap_reference_size = sizeof(mirror::HeapReference<mirror::Object>);
  size_t index_in_cache = mirror::Array::DataOffset(heap_reference_size).SizeValue() +
      invoke->GetIndexInDexCache() * heap_reference_size;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ movl(temp, Address(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().SizeValue()));
  // temp = temp[index_in_cache]
  __ movl(temp, Address(temp, index_in_cache));
  // (temp + offset_of_quick_compiled_code)()
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().SizeValue()));

  codegen_->RecordPcInfo(invoke->GetDexPc());
}

void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(add);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
  add->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  DCHECK_EQ(locations->InAt(0).AsX86_64().AsCpuRegister().AsRegister(),
            locations->Out().AsX86_64().AsCpuRegister().AsRegister());
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      if (locations->InAt(1).IsRegister()) {
        __ addl(locations->InAt(0).AsX86_64().AsCpuRegister(),
                locations->InAt(1).AsX86_64().AsCpuRegister());
      } else if (locations->InAt(1).IsConstant()) {
        HConstant* instruction = locations->InAt(1).GetConstant();
        Immediate imm(instruction->AsIntConstant()->GetValue());
        __ addl(locations->InAt(0).AsX86_64().AsCpuRegister(), imm);
      } else {
        __ addl(locations->InAt(0).AsX86_64().AsCpuRegister(),
                Address(CpuRegister(RSP), locations->InAt(1).GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimLong: {
      __ addq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
}
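
// x86 integer ALU instructions are two-address (dst = dst op src), which is
// why VisitAdd above and VisitSub below require the output location to be the
// same as the first input and DCHECK that the allocator honored it.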

void LocationsBuilderX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(sub);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
  }
  sub->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  DCHECK_EQ(locations->InAt(0).AsX86_64().AsCpuRegister().AsRegister(),
            locations->Out().AsX86_64().AsCpuRegister().AsRegister());
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      if (locations->InAt(1).IsRegister()) {
        __ subl(locations->InAt(0).AsX86_64().AsCpuRegister(),
                locations->InAt(1).AsX86_64().AsCpuRegister());
      } else if (locations->InAt(1).IsConstant()) {
        HConstant* instruction = locations->InAt(1).GetConstant();
        Immediate imm(instruction->AsIntConstant()->GetValue());
        __ subl(locations->InAt(0).AsX86_64().AsCpuRegister(), imm);
      } else {
        __ subl(locations->InAt(0).AsX86_64().AsCpuRegister(),
                Address(CpuRegister(RSP), locations->InAt(1).GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimLong: {
      __ subq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
  }
}

void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetOut(X86_64CpuLocation(RAX));
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  LoadCurrentMethod(CpuRegister(calling_convention.GetRegisterAt(1)));
  __ movq(CpuRegister(calling_convention.GetRegisterAt(0)),
          Immediate(instruction->GetTypeIndex()));

  __ gs()->call(Address::Absolute(
      QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAllocObjectWithAccessCheck), true));

  codegen_->RecordPcInfo(instruction->GetDexPc());
}
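
// The gs-prefixed call above goes through the current Thread's quick
// entrypoint table: on x86-64, ART points the GS segment register at the
// Thread, so Address::Absolute(offset, true) addresses thread-local storage.
// RecordPcInfo maps the call's return PC back to a dex PC for stack walks.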

void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
}

void LocationsBuilderX86_64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitNot(HNot* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  DCHECK_EQ(locations->InAt(0).AsX86_64().AsCpuRegister().AsRegister(),
            locations->Out().AsX86_64().AsCpuRegister().AsRegister());
  __ xorq(locations->Out().AsX86_64().AsCpuRegister(), Immediate(1));
}

void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction) {
  LOG(FATAL) << "Unimplemented";
}

void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers for the write barrier.
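  // Storing a reference must mark the card covering the holder object so the
  // garbage collector knows to re-scan it: one temp receives the card table
  // base from thread-local storage, the other the object address shifted by
  // kCardShift.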
  if (instruction->InputAt(1)->GetType() == Primitive::kPrimNot) {
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  }
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  CpuRegister obj = locations->InAt(0).AsX86_64().AsCpuRegister();
  CpuRegister value = locations->InAt(1).AsX86_64().AsCpuRegister();
  size_t offset = instruction->GetFieldOffset().SizeValue();
  Primitive::Type field_type = instruction->InputAt(1)->GetType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      __ movb(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      __ movw(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimInt: {
      __ movl(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimNot: {
      __ movl(Address(obj, offset), value);
      Label is_null;
      CpuRegister temp = locations->GetTemp(0).AsX86_64().AsCpuRegister();
      CpuRegister card = locations->GetTemp(1).AsX86_64().AsCpuRegister();
      __ testl(value, value);
      __ j(kEqual, &is_null);
      __ gs()->movq(card, Address::Absolute(
          Thread::CardTableOffset<kX86_64WordSize>().Int32Value(), true));
      __ movq(temp, obj);
      __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
      __ movb(Address(temp, card, TIMES_1, 0), card);
      __ Bind(&is_null);
      break;
    }

    case Primitive::kPrimLong: {
      __ movq(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << field_type;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
  }
}

void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  CpuRegister obj = locations->InAt(0).AsX86_64().AsCpuRegister();
  CpuRegister out = locations->Out().AsX86_64().AsCpuRegister();
  size_t offset = instruction->GetFieldOffset().SizeValue();

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      __ movzxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimByte: {
      __ movsxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimShort: {
      __ movsxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimChar: {
      __ movzxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ movl(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimLong: {
      __ movq(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
  }
}
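
// Field loads pick the extension matching Java semantics: boolean and char
// are zero-extended, byte and short are sign-extended, so the full register
// already holds the canonical 32-bit value.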

void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::Any());
  // TODO: Have a normalization phase that makes this instruction never used.
  locations->SetOut(Location::SameAsFirstInput());
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
  SlowPathCode* slow_path =
      new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);
  DCHECK(obj.Equals(locations->Out()));

  if (obj.IsRegister()) {
    __ cmpl(obj.AsX86_64().AsCpuRegister(), Immediate(0));
  } else {
    DCHECK(locations->InAt(0).IsStackSlot());
    __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
  }
  __ j(kEqual, slow_path->GetEntryLabel());
}

void LocationsBuilderX86_64::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
}

void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction) {
  LOG(FATAL) << "Unimplemented";
}

void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
  return codegen_->GetAssembler();
}

void ParallelMoveResolverX86_64::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ movq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());
    } else if (destination.IsStackSlot()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsX86_64().AsCpuRegister());
    } else {
      DCHECK(destination.IsDoubleStackSlot());
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsX86_64().AsCpuRegister());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ movl(destination.AsX86_64().AsCpuRegister(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsStackSlot());
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsDoubleStackSlot()) {
    if (destination.IsRegister()) {
      __ movq(destination.AsX86_64().AsCpuRegister(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsDoubleStackSlot());
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsConstant()) {
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant()) {
      Immediate imm(constant->AsIntConstant()->GetValue());
      if (destination.IsRegister()) {
        __ movl(destination.AsX86_64().AsCpuRegister(), imm);
      } else {
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegister()) {
        __ movq(destination.AsX86_64().AsCpuRegister(), Immediate(value));
      } else {
        __ movq(CpuRegister(TMP), Immediate(value));
        __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
      }
    } else {
      LOG(FATAL) << "Unimplemented constant type";
    }
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}
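
// The Exchange helpers below swap a register with a stack slot, or two stack
// slots, without help from the register allocator: TMP holds one value, and
// the memory-to-memory variants obtain a second scratch register through
// ScratchRegisterScope, spilling one around the swap if none is free.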

void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movl(Address(CpuRegister(RSP), mem), reg);
  __ movl(reg, CpuRegister(TMP));
}

void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movl(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}

void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movq(Address(CpuRegister(RSP), mem), reg);
  __ movq(reg, CpuRegister(TMP));
}
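
// As in Exchange32 above, spilling the extra scratch register pushes a word
// onto the stack and moves RSP, so both memory operands of the 64-bit
// memory-to-memory exchange below must be rebased by stack_offset.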

void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movq(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}

void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    __ xchgq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsX86_64().AsCpuRegister(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange32(destination.AsX86_64().AsCpuRegister(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange32(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsX86_64().AsCpuRegister(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
    Exchange64(destination.AsX86_64().AsCpuRegister(), source.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    Exchange64(destination.GetStackIndex(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}

void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}

void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}

}  // namespace x86_64
}  // namespace art