code_generator_x86.cc revision a7062e05e6048c7f817d784a5b94e3122e25b1ec
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86.h"
#include "utils/assembler.h"
#include "utils/x86/assembler_x86.h"
#include "utils/x86/managed_register_x86.h"

#include "entrypoints/quick/quick_entrypoints.h"
#include "mirror/array.h"
#include "mirror/art_method.h"
#include "thread.h"

// Shorthand for emitting an instruction through this code generator's x86 assembler.
#define __ reinterpret_cast<X86Assembler*>(GetAssembler())->

namespace art {

// Convenience accessor: view this Location's register as an x86 managed register.
x86::X86ManagedRegister Location::AsX86() const {
  return reg().AsX86();
}

namespace x86 {

// Number of words pushed on the stack before the frame adjustment in
// GenerateFrameEntry (the return PC pushed by the call counts as one).
static constexpr int kNumberOfPushedRegistersAtEntry = 1;
// The current method is spilled at the bottom of the frame, i.e. at [ESP + 0].
static constexpr int kCurrentMethodStackOffset = 0;

void CodeGeneratorX86::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << X86ManagedRegister::FromCpuRegister(Register(reg));
}

void CodeGeneratorX86::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << X86ManagedRegister::FromXmmRegister(XmmRegister(reg));
}

CodeGeneratorX86::CodeGeneratorX86(HGraph* graph)
    : CodeGenerator(graph, kNumberOfRegIds),
      location_builder_(graph, this),
      instruction_visitor_(graph, this) {}

// The "blocked" array for register pairs is stored immediately after the
// per-register entries in the same allocation.
static bool* GetBlockedRegisterPairs(bool* blocked_registers) {
  return blocked_registers + kNumberOfAllocIds;
}

// Picks a free register (or register pair for longs) of the requested type and
// marks it as blocked. Also blocks the two single registers making up an
// allocated pair so they cannot be handed out independently.
ManagedRegister CodeGeneratorX86::AllocateFreeRegister(Primitive::Type type,
                                                       bool* blocked_registers) const {
  switch (type) {
    case Primitive::kPrimLong: {
      size_t reg = AllocateFreeRegisterInternal(
          GetBlockedRegisterPairs(blocked_registers), kNumberOfRegisterPairs);
      X86ManagedRegister pair =
          X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      // Block the constituent single registers as well.
      blocked_registers[pair.AsRegisterPairLow()] = true;
      blocked_registers[pair.AsRegisterPairHigh()] = true;
      return pair;
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      size_t reg = AllocateFreeRegisterInternal(blocked_registers, kNumberOfCpuRegisters);
      return X86ManagedRegister::FromCpuRegister(static_cast<Register>(reg));
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << type;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  // Not reached: all cases above either return or LOG(FATAL).
  return ManagedRegister::NoRegister();
}

// Marks registers the allocator must never hand out.
void CodeGeneratorX86::SetupBlockedRegisters(bool* blocked_registers) const {
  bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);

  // Don't allocate the dalvik style register pair passing.
  blocked_register_pairs[ECX_EDX] = true;

  // Stack register is always reserved.
  blocked_registers[ESP] = true;

  // TODO: We currently don't use Quick's callee saved registers.
  blocked_registers[EBP] = true;
  blocked_registers[ESI] = true;
  blocked_registers[EDI] = true;
  // Block every pair containing EDI as well, since EDI itself is blocked.
  blocked_register_pairs[EAX_EDI] = true;
  blocked_register_pairs[EDX_EDI] = true;
  blocked_register_pairs[ECX_EDI] = true;
  blocked_register_pairs[EBX_EDI] = true;
}

size_t CodeGeneratorX86::GetNumberOfRegisters() const {
  return kNumberOfRegIds;
}

// Helper to build a register Location from a raw x86 CPU register.
static Location X86CpuLocation(Register reg) {
  return Location::RegisterLocation(X86ManagedRegister::FromCpuRegister(reg));
}

InstructionCodeGeneratorX86::InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen)
    : HGraphVisitor(graph),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

// Emits the method prologue: computes the frame size, adjusts ESP, and spills
// the current method (passed in EAX) to the bottom of the frame.
void CodeGeneratorX86::GenerateFrameEntry() {
  // Create a fake register to mimic Quick.
  static const int kFakeReturnRegister = 8;
  core_spill_mask_ |= (1 << kFakeReturnRegister);

  // Frame layout (from high to low addresses): out vregs + local vregs,
  // a filler word, the ArtMethod* slot, plus the words already pushed at
  // entry, rounded up to the stack alignment.
  SetFrameSize(RoundUp(
      (GetGraph()->GetMaximumNumberOfOutVRegs() + GetGraph()->GetNumberOfVRegs()) * kVRegSize
      + kVRegSize  // filler
      + kX86WordSize  // Art method
      + kNumberOfPushedRegistersAtEntry * kX86WordSize,
      kStackAlignment));

  // The return PC has already been pushed on the stack.
  __ subl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
  // Spill the current method (in EAX on entry) to its frame slot.
  __ movl(Address(ESP, kCurrentMethodStackOffset), EAX);
}

// Emits the method epilogue: pops the frame (the return PC word stays for ret).
void CodeGeneratorX86::GenerateFrameExit() {
  __ addl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
}

void CodeGeneratorX86::Bind(Label* label) {
  __ Bind(label);
}

// Reloads the current method from its spill slot at the bottom of the frame.
void InstructionCodeGeneratorX86::LoadCurrentMethod(Register reg) {
  __ movl(reg, Address(ESP, kCurrentMethodStackOffset));
}

// Returns the ESP-relative offset of a dex virtual register. Incoming
// parameters live in the caller's frame; other vregs live in this frame.
int32_t CodeGeneratorX86::GetStackSlot(HLocal* local) const {
  uint16_t reg_number = local->GetRegNumber();
  uint16_t number_of_vregs = GetGraph()->GetNumberOfVRegs();
  uint16_t number_of_in_vregs = GetGraph()->GetNumberOfInVRegs();
  if (reg_number >= number_of_vregs - number_of_in_vregs) {
    // Local is a parameter of the method. It is stored in the caller's frame.
    return GetFrameSize() + kX86WordSize  // ART method
           + (reg_number - number_of_vregs + number_of_in_vregs) * kVRegSize;
  } else {
    // Local is a temporary in this method. It is stored in this method's frame.
    return GetFrameSize() - (kNumberOfPushedRegistersAtEntry * kX86WordSize)
           - kVRegSize  // filler.
           - (number_of_vregs * kVRegSize)
           + (reg_number * kVRegSize);
  }
}


// Maps an HLoadLocal to its (single or double) stack-slot Location.
Location CodeGeneratorX86::GetStackLocation(HLoadLocal* load) const {
  switch (load->GetType()) {
    case Primitive::kPrimLong:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
      break;

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented type " << load->GetType();

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << load->GetType();
  }

  LOG(FATAL) << "Unreachable";
  return Location();
}

// Core registers used to pass arguments to runtime entrypoints.
static constexpr Register kRuntimeParameterCoreRegisters[] = { EAX, ECX, EDX };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);

class InvokeRuntimeCallingConvention : public CallingConvention<Register> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

// Assigns the next argument of the given type to a Location following the
// dex calling convention: registers first, then the stack. Longs may be
// split across the last register and the stack (QuickParameter).
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return X86CpuLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(index, kX86WordSize));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      gp_index_ += 2;  // A long consumes two argument slots.
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        // Both halves fit in a register pair.
        return Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(
            calling_convention.GetRegisterPairAt(index)));
      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
        // Split: low half in the last register, high half on the stack.
        return Location::QuickParameter(index);
      } else {
        // Both halves on the stack.
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(index, kX86WordSize));
      }
    }

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented parameter type " << type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}

// Moves a 32-bit value between any combination of register and stack slot.
// EAX is used as a scratch register for memory-to-memory moves.
void CodeGeneratorX86::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
    } else {
      DCHECK(source.IsStackSlot());
      __ movl(destination.AsX86().AsCpuRegister(), Address(ESP, source.GetStackIndex()));
    }
  } else {
    if (source.IsRegister()) {
      __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsCpuRegister());
    } else {
      DCHECK(source.IsStackSlot());
      // Memory-to-memory: go through EAX.
      __ movl(EAX, Address(ESP, source.GetStackIndex()));
      __ movl(Address(ESP, destination.GetStackIndex()), EAX);
    }
  }
}

// Moves a 64-bit value between register pairs, double stack slots and
// QuickParameter locations (low half in a register, high half on the stack).
// EAX is used as a scratch register for memory-to-memory moves.
void CodeGeneratorX86::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movl(destination.AsX86().AsRegisterPairLow(), source.AsX86().AsRegisterPairLow());
      __ movl(destination.AsX86().AsRegisterPairHigh(), source.AsX86().AsRegisterPairHigh());
    } else if (source.IsQuickParameter()) {
      uint32_t argument_index = source.GetQuickParameterIndex();
      InvokeDexCallingConvention calling_convention;
      // Low half comes from the convention register; high half lives in the
      // caller's frame, hence the GetFrameSize() adjustment.
      __ movl(destination.AsX86().AsRegisterPairLow(),
              calling_convention.GetRegisterAt(argument_index));
      __ movl(destination.AsX86().AsRegisterPairHigh(), Address(ESP,
          calling_convention.GetStackOffsetOf(argument_index + 1, kX86WordSize) + GetFrameSize()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movl(destination.AsX86().AsRegisterPairLow(), Address(ESP, source.GetStackIndex()));
      __ movl(destination.AsX86().AsRegisterPairHigh(),
              Address(ESP, source.GetHighStackIndex(kX86WordSize)));
    }
  } else if (destination.IsQuickParameter()) {
    // Destination is an outgoing split argument: low half in a register,
    // high half in this frame's outgoing-args area (no frame-size offset).
    InvokeDexCallingConvention calling_convention;
    uint32_t argument_index = destination.GetQuickParameterIndex();
    if (source.IsRegister()) {
      __ movl(calling_convention.GetRegisterAt(argument_index), source.AsX86().AsRegisterPairLow());
      __ movl(Address(ESP, calling_convention.GetStackOffsetOf(argument_index + 1, kX86WordSize)),
              source.AsX86().AsRegisterPairHigh());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movl(calling_convention.GetRegisterAt(argument_index),
              Address(ESP, source.GetStackIndex()));
      // High half is memory-to-memory: go through EAX.
      __ movl(EAX, Address(ESP, source.GetHighStackIndex(kX86WordSize)));
      __ movl(Address(ESP, calling_convention.GetStackOffsetOf(argument_index + 1, kX86WordSize)), EAX);
    }
  } else {
    if (source.IsRegister()) {
      __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsRegisterPairLow());
      __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
              source.AsX86().AsRegisterPairHigh());
    } else if (source.IsQuickParameter()) {
      InvokeDexCallingConvention calling_convention;
      uint32_t argument_index = source.GetQuickParameterIndex();
      __ movl(Address(ESP, destination.GetStackIndex()),
              calling_convention.GetRegisterAt(argument_index));
      // High half is in the caller's frame; copy it through EAX.
      __ movl(EAX, Address(ESP,
          calling_convention.GetStackOffsetOf(argument_index + 1, kX86WordSize) + GetFrameSize()));
      __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)), EAX);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Memory-to-memory: move both halves through EAX.
      __ movl(EAX, Address(ESP, source.GetStackIndex()));
      __ movl(Address(ESP, destination.GetStackIndex()), EAX);
      __ movl(EAX, Address(ESP, source.GetHighStackIndex(kX86WordSize)));
      __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)), EAX);
    }
  }
}

// Materializes `instruction`'s value into `location`. Constants are emitted
// inline; locals are copied from their stack slot; anything else must be the
// instruction just compiled, whose output location is used as the source.
void CodeGeneratorX86::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  if (instruction->AsIntConstant() != nullptr) {
    Immediate imm(instruction->AsIntConstant()->GetValue());
    if (location.IsRegister()) {
      __ movl(location.AsX86().AsCpuRegister(), imm);
    } else {
      __ movl(Address(ESP, location.GetStackIndex()), imm);
    }
  } else if (instruction->AsLongConstant() != nullptr) {
    int64_t value = instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      __ movl(location.AsX86().AsRegisterPairLow(), Immediate(Low32Bits(value)));
      __ movl(location.AsX86().AsRegisterPairHigh(), Immediate(High32Bits(value)));
    } else {
      __ movl(Address(ESP, location.GetStackIndex()), Immediate(Low32Bits(value)));
      __ movl(Address(ESP, location.GetHighStackIndex(kX86WordSize)), Immediate(High32Bits(value)));
    }
  } else if (instruction->AsLoadLocal() != nullptr) {
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        Move32(location, Location::StackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      case Primitive::kPrimLong:
        Move64(location, Location::DoubleStackSlot(
            GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      default:
        LOG(FATAL) << "Unimplemented local type " << instruction->GetType();
    }
  } else {
    // This can currently only happen when the instruction that requests the move
    // is the next to be compiled.
    DCHECK_EQ(instruction->GetNext(), move_for);
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        Move32(location, instruction->GetLocations()->Out());
        break;

      case Primitive::kPrimLong:
        Move64(location, instruction->GetLocations()->Out());
        break;

      default:
        LOG(FATAL) << "Unimplemented type " << instruction->GetType();
    }
  }
}

void LocationsBuilderX86::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  if (GetGraph()->GetExitBlock() == successor) {
    codegen_->GenerateFrameExit();
  } else if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    // Only emit a jump when the successor is not laid out next.
    __ jmp(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderX86::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitExit(HExit* exit) {
  if (kIsDebugBuild) {
    // Trap in debug builds if control ever reaches the exit block's code.
    __ Comment("Unreachable");
    __ int3();
  }
}

void LocationsBuilderX86::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  locations->SetInAt(0, Location::Any());
  if_instr->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
  // TODO: Generate the input as a condition, instead of materializing in a register.
  // The materialized condition is compared against zero to branch.
  Location location = if_instr->GetLocations()->InAt(0);
  if (location.IsRegister()) {
    __ cmpl(location.AsX86().AsCpuRegister(), Immediate(0));
  } else {
    __ cmpl(Address(ESP, location.GetStackIndex()), Immediate(0));
  }
  __ j(kEqual, codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfTrueSuccessor())) {
    __ jmp(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
  }
}

void LocationsBuilderX86::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderX86::VisitLoadLocal(HLoadLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
}

void LocationsBuilderX86::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  // The stored value (input 1) is pinned directly to the local's stack slot,
  // so no code needs to be emitted for the store itself.
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << store->InputAt(1)->GetType();
  }
  store->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitStoreLocal(HStoreLocal* store) {
}

void LocationsBuilderX86::VisitEqual(HEqual* equal) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(equal);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::Any());
  locations->SetOut(Location::SameAsFirstInput());
  equal->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitEqual(HEqual* equal) {
  LocationSummary* locations = equal->GetLocations();
  if (locations->InAt(1).IsRegister()) {
    __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(),
            locations->InAt(1).AsX86().AsCpuRegister());
  } else {
    __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(),
            Address(ESP, locations->InAt(1).GetStackIndex()));
  }
  // Materialize the comparison result as 0/1 in the output register's low byte.
  __ setb(kEqual, locations->Out().AsX86().AsCpuRegister());
}

void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
  constant->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
}

void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
  constant->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
}

void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret) {
  codegen_->GenerateFrameExit();
  __ ret();
}

void LocationsBuilderX86::VisitReturn(HReturn* ret) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  // Return values are pinned to the convention registers: EAX for 32-bit
  // values, EAX:EDX for longs.
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(0, X86CpuLocation(EAX));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(
          0, Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(EAX_EDX)));
      break;

    default:
      LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
  }
  ret->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    // Verify the register allocator honored the return-register pinning.
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86().AsCpuRegister(), EAX);
        break;

      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86().AsRegisterPair(), EAX_EDX);
        break;

      default:
        LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
  __ ret();
}

void LocationsBuilderX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(invoke);
  // Temp register used by the code generator to hold the resolved method.
  locations->AddTemp(Location::RequiresRegister());

  // Place arguments according to the dex calling convention.
  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  switch (invoke->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetOut(X86CpuLocation(EAX));
      break;

    case Primitive::kPrimLong:
      locations->SetOut(Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(EAX_EDX)));
      break;

    case Primitive::kPrimVoid:
      break;

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented return type " << invoke->GetType();
      break;
  }

  invoke->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).AsX86().AsCpuRegister();
  // Byte offset of the callee's slot in the dex cache resolved-methods array.
  size_t index_in_cache = mirror::Array::DataOffset(sizeof(mirror::Object*)).Int32Value() +
      invoke->GetIndexInDexCache() * kX86WordSize;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ movl(temp, Address(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()));
  // temp = temp[index_in_cache]
  __ movl(temp, Address(temp, index_in_cache));
  // (temp + offset_of_quick_compiled_code)()
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value()));

  codegen_->RecordPcInfo(invoke->GetDexPc());
}

void LocationsBuilderX86::VisitAdd(HAdd* add) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(add);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      // x86 two-operand form: destination is also the first operand.
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
  add->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(),
                locations->Out().AsX86().AsCpuRegister());
      if (locations->InAt(1).IsRegister()) {
        __ addl(locations->InAt(0).AsX86().AsCpuRegister(),
                locations->InAt(1).AsX86().AsCpuRegister());
      } else {
        __ addl(locations->InAt(0).AsX86().AsCpuRegister(),
                Address(ESP, locations->InAt(1).GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      DCHECK_EQ(locations->InAt(0).AsX86().AsRegisterPair(),
                locations->Out().AsX86().AsRegisterPair());
      // 64-bit add: add low halves, then add-with-carry the high halves.
      if (locations->InAt(1).IsRegister()) {
        __ addl(locations->InAt(0).AsX86().AsRegisterPairLow(),
                locations->InAt(1).AsX86().AsRegisterPairLow());
        __ adcl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
                locations->InAt(1).AsX86().AsRegisterPairHigh());
      } else {
        __ addl(locations->InAt(0).AsX86().AsRegisterPairLow(),
                Address(ESP, locations->InAt(1).GetStackIndex()));
        __ adcl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
                Address(ESP, locations->InAt(1).GetHighStackIndex(kX86WordSize)));
      }
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
}

void LocationsBuilderX86::VisitSub(HSub* sub) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(sub);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      // x86 two-operand form: destination is also the first operand.
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
  }
  sub->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(),
                locations->Out().AsX86().AsCpuRegister());
      if (locations->InAt(1).IsRegister()) {
        __ subl(locations->InAt(0).AsX86().AsCpuRegister(),
                locations->InAt(1).AsX86().AsCpuRegister());
      } else {
        __ subl(locations->InAt(0).AsX86().AsCpuRegister(),
                Address(ESP, locations->InAt(1).GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      DCHECK_EQ(locations->InAt(0).AsX86().AsRegisterPair(),
                locations->Out().AsX86().AsRegisterPair());
      // 64-bit sub: subtract low halves, then subtract-with-borrow the high halves.
      if (locations->InAt(1).IsRegister()) {
        __ subl(locations->InAt(0).AsX86().AsRegisterPairLow(),
                locations->InAt(1).AsX86().AsRegisterPairLow());
        __ sbbl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
                locations->InAt(1).AsX86().AsRegisterPairHigh());
      } else {
        __ subl(locations->InAt(0).AsX86().AsRegisterPairLow(),
                Address(ESP, locations->InAt(1).GetStackIndex()));
        __ sbbl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
                Address(ESP, locations->InAt(1).GetHighStackIndex(kX86WordSize)));
      }
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
  }
}

void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  // The runtime entrypoint returns the new object in EAX.
  locations->SetOut(X86CpuLocation(EAX));
  InvokeRuntimeCallingConvention calling_convention;
  // Reserve the entrypoint argument registers as temps.
  locations->AddTemp(X86CpuLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(X86CpuLocation(calling_convention.GetRegisterAt(1)));
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  // Arguments: type index in the first convention register, current method in the second.
  LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));

  // Call the allocation entrypoint through the thread-local (FS-relative) table.
  __ fs()->call(
      Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocObjectWithAccessCheck)));

  codegen_->RecordPcInfo(instruction->GetDexPc());
}

void LocationsBuilderX86::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    // Stack-passed parameters live in the caller's frame: rebase past our frame.
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
}

void LocationsBuilderX86::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitNot(HNot* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location out = locations->Out();
  DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(), out.AsX86().AsCpuRegister());
  // XOR with 1 flips the value; assumes the input is 0 or 1 (boolean not).
  __ xorl(out.AsX86().AsCpuRegister(), Immediate(1));
}

void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
  LOG(FATAL) << "Unimplemented";
}

void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction) {
  LOG(FATAL) << "Unimplemented";
}

void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction) {
  LOG(FATAL) << "Unimplemented";
}

void InstructionCodeGeneratorX86::VisitParallelMove(HParallelMove* instruction) {
  LOG(FATAL) << "Unimplemented";
}

}  // namespace x86
}  // namespace art