// code_generator_x86.cc, revision 9cf35523764d829ae0470dae2d5dd99be469c841
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86.h"
#include "utils/assembler.h"
#include "utils/x86/assembler_x86.h"
#include "utils/x86/managed_register_x86.h"

#include "entrypoints/quick/quick_entrypoints.h"
#include "mirror/array.h"
#include "mirror/art_method.h"
#include "thread.h"

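// The `__` shorthand below forwards each mnemonic to the x86 assembler, so
// `__ movl(...)` reads like the emitted assembly.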
#define __ reinterpret_cast<X86Assembler*>(GetAssembler())->

namespace art {

x86::X86ManagedRegister Location::AsX86() const {
  return reg().AsX86();
}

namespace x86 {

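// Only one slot is pushed on entry (the return PC, by the call instruction),
// and the current ArtMethod* (passed in EAX) is stored at the bottom of the
// frame; see GenerateFrameEntry below.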
static constexpr int kNumberOfPushedRegistersAtEntry = 1;
static constexpr int kCurrentMethodStackOffset = 0;

void CodeGeneratorX86::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << X86ManagedRegister::FromCpuRegister(Register(reg));
}

void CodeGeneratorX86::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << X86ManagedRegister::FromXmmRegister(XmmRegister(reg));
}

CodeGeneratorX86::CodeGeneratorX86(HGraph* graph)
    : CodeGenerator(graph, kNumberOfRegIds),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this) {}

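// The frame holds the spill slots, one vreg-sized slot for the ArtMethod*,
// and whatever was pushed on entry (currently just the return PC), rounded
// up to the stack alignment.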
void CodeGeneratorX86::ComputeFrameSize(size_t number_of_spill_slots) {
  SetFrameSize(RoundUp(
      number_of_spill_slots * kVRegSize
      + kVRegSize  // Art method
      + kNumberOfPushedRegistersAtEntry * kX86WordSize,
      kStackAlignment));
}

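// Blocked flags for register pairs live in the same array as the flags for
// single registers, starting at offset kNumberOfAllocIds.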
static bool* GetBlockedRegisterPairs(bool* blocked_registers) {
  return blocked_registers + kNumberOfAllocIds;
}

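// When a pair is handed out for a long, both of its halves are also marked
// blocked so they cannot be allocated individually while the pair is live.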
ManagedRegister CodeGeneratorX86::AllocateFreeRegister(Primitive::Type type,
                                                       bool* blocked_registers) const {
  switch (type) {
    case Primitive::kPrimLong: {
      size_t reg = AllocateFreeRegisterInternal(
          GetBlockedRegisterPairs(blocked_registers), kNumberOfRegisterPairs);
      X86ManagedRegister pair =
          X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      blocked_registers[pair.AsRegisterPairLow()] = true;
      blocked_registers[pair.AsRegisterPairHigh()] = true;
      return pair;
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      size_t reg = AllocateFreeRegisterInternal(blocked_registers, kNumberOfCpuRegisters);
      return X86ManagedRegister::FromCpuRegister(static_cast<Register>(reg));
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << type;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return ManagedRegister::NoRegister();
}

void CodeGeneratorX86::SetupBlockedRegisters(bool* blocked_registers) const {
  bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);

  // Don't allocate the dalvik style register pair passing.
  blocked_register_pairs[ECX_EDX] = true;

  // Stack register is always reserved.
  blocked_registers[ESP] = true;

  // TODO: We currently don't use Quick's callee saved registers.
  blocked_registers[EBP] = true;
  blocked_registers[ESI] = true;
  blocked_registers[EDI] = true;
  blocked_register_pairs[EAX_EDI] = true;
  blocked_register_pairs[EDX_EDI] = true;
  blocked_register_pairs[ECX_EDI] = true;
  blocked_register_pairs[EBX_EDI] = true;
}

size_t CodeGeneratorX86::GetNumberOfRegisters() const {
  return kNumberOfRegIds;
}

static Location X86CpuLocation(Register reg) {
  return Location::RegisterLocation(X86ManagedRegister::FromCpuRegister(reg));
}

InstructionCodeGeneratorX86::InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}

void CodeGeneratorX86::GenerateFrameEntry() {
  // Create a fake register to mimic Quick.
  static const int kFakeReturnRegister = 8;
  core_spill_mask_ |= (1 << kFakeReturnRegister);

  // The return PC has already been pushed on the stack.
  __ subl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
  __ movl(Address(ESP, kCurrentMethodStackOffset), EAX);
}

void CodeGeneratorX86::GenerateFrameExit() {
  __ addl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
}

void CodeGeneratorX86::Bind(Label* label) {
  __ Bind(label);
}

void InstructionCodeGeneratorX86::LoadCurrentMethod(Register reg) {
  __ movl(reg, Address(ESP, kCurrentMethodStackOffset));
}

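// Maps a dex register to its stack slot. The last `number_of_in_vregs` vregs
// are the method's parameters; they live in the caller's frame, just above
// this frame's ArtMethod* slot. All other vregs live in the current frame,
// below the pushed registers and a one-slot filler.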
int32_t CodeGeneratorX86::GetStackSlot(HLocal* local) const {
  uint16_t reg_number = local->GetRegNumber();
  uint16_t number_of_vregs = GetGraph()->GetNumberOfVRegs();
  uint16_t number_of_in_vregs = GetGraph()->GetNumberOfInVRegs();
  if (reg_number >= number_of_vregs - number_of_in_vregs) {
    // Local is a parameter of the method. It is stored in the caller's frame.
    return GetFrameSize() + kVRegSize  // ART method
                          + (reg_number - number_of_vregs + number_of_in_vregs) * kVRegSize;
  } else {
    // Local is a temporary in this method. It is stored in this method's frame.
    return GetFrameSize() - (kNumberOfPushedRegistersAtEntry * kX86WordSize)
                          - kVRegSize  // filler.
                          - (number_of_vregs * kVRegSize)
                          + (reg_number * kVRegSize);
  }
}

Location CodeGeneratorX86::GetStackLocation(HLoadLocal* load) const {
  switch (load->GetType()) {
    case Primitive::kPrimLong:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented type " << load->GetType();

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << load->GetType();
  }

  LOG(FATAL) << "Unreachable";
  return Location();
}

static constexpr Register kRuntimeParameterCoreRegisters[] = { EAX, ECX, EDX };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);

class InvokeRuntimeCallingConvention : public CallingConvention<Register> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

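// Integral arguments are assigned to the convention's registers first, then
// to stack slots. A long whose low half lands in the last available register
// is split: the low half stays in that register and the high half goes to
// the stack, which is what Location::QuickParameter models.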
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return X86CpuLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      gp_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(
            calling_convention.GetRegisterPairAt(index)));
      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
        return Location::QuickParameter(index);
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented parameter type " << type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}

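// x86 has no memory-to-memory mov, so a stack-to-stack move is emitted as a
// pushl/popl pair rather than going through a scratch register.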
void CodeGeneratorX86::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
    } else {
      DCHECK(source.IsStackSlot());
      __ movl(destination.AsX86().AsCpuRegister(), Address(ESP, source.GetStackIndex()));
    }
  } else {
    if (source.IsRegister()) {
      __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsCpuRegister());
    } else {
      DCHECK(source.IsStackSlot());
      __ pushl(Address(ESP, source.GetStackIndex()));
      __ popl(Address(ESP, destination.GetStackIndex()));
    }
  }
}

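// A 64-bit value is either a register pair, a quick parameter (low half in a
// register, high half in the caller's stack), or a double stack slot; in
// every case the two 32-bit halves are moved separately.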
void CodeGeneratorX86::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movl(destination.AsX86().AsRegisterPairLow(), source.AsX86().AsRegisterPairLow());
      __ movl(destination.AsX86().AsRegisterPairHigh(), source.AsX86().AsRegisterPairHigh());
    } else if (source.IsQuickParameter()) {
      uint32_t argument_index = source.GetQuickParameterIndex();
      InvokeDexCallingConvention calling_convention;
      __ movl(destination.AsX86().AsRegisterPairLow(),
              calling_convention.GetRegisterAt(argument_index));
      __ movl(destination.AsX86().AsRegisterPairHigh(), Address(ESP,
          calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movl(destination.AsX86().AsRegisterPairLow(), Address(ESP, source.GetStackIndex()));
      __ movl(destination.AsX86().AsRegisterPairHigh(),
              Address(ESP, source.GetHighStackIndex(kX86WordSize)));
    }
  } else if (destination.IsQuickParameter()) {
    InvokeDexCallingConvention calling_convention;
    uint32_t argument_index = destination.GetQuickParameterIndex();
    if (source.IsRegister()) {
      __ movl(calling_convention.GetRegisterAt(argument_index), source.AsX86().AsRegisterPairLow());
      __ movl(Address(ESP, calling_convention.GetStackOffsetOf(argument_index + 1)),
              source.AsX86().AsRegisterPairHigh());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movl(calling_convention.GetRegisterAt(argument_index),
              Address(ESP, source.GetStackIndex()));
      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
      __ popl(Address(ESP, calling_convention.GetStackOffsetOf(argument_index + 1)));
    }
  } else {
    if (source.IsRegister()) {
      __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsRegisterPairLow());
      __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
              source.AsX86().AsRegisterPairHigh());
    } else if (source.IsQuickParameter()) {
      InvokeDexCallingConvention calling_convention;
      uint32_t argument_index = source.GetQuickParameterIndex();
      __ movl(Address(ESP, destination.GetStackIndex()),
              calling_convention.GetRegisterAt(argument_index));
      __ pushl(Address(ESP,
          calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize()));
      __ popl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ pushl(Address(ESP, source.GetStackIndex()));
      __ popl(Address(ESP, destination.GetStackIndex()));
      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
      __ popl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)));
    }
  }
}

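// Constants and local loads are materialized directly at the use site; any
// other instruction must be the one just compiled, and its value is copied
// from its output location.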
void CodeGeneratorX86::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  if (instruction->AsIntConstant() != nullptr) {
    Immediate imm(instruction->AsIntConstant()->GetValue());
    if (location.IsRegister()) {
      __ movl(location.AsX86().AsCpuRegister(), imm);
    } else {
      __ movl(Address(ESP, location.GetStackIndex()), imm);
    }
  } else if (instruction->AsLongConstant() != nullptr) {
    int64_t value = instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      __ movl(location.AsX86().AsRegisterPairLow(), Immediate(Low32Bits(value)));
      __ movl(location.AsX86().AsRegisterPairHigh(), Immediate(High32Bits(value)));
    } else {
      __ movl(Address(ESP, location.GetStackIndex()), Immediate(Low32Bits(value)));
      __ movl(Address(ESP, location.GetHighStackIndex(kX86WordSize)), Immediate(High32Bits(value)));
    }
  } else if (instruction->AsLoadLocal() != nullptr) {
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        Move32(location, Location::StackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      case Primitive::kPrimLong:
        Move64(location, Location::DoubleStackSlot(
            GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      default:
        LOG(FATAL) << "Unimplemented local type " << instruction->GetType();
    }
  } else {
    // This can currently only happen when the instruction that requests the move
    // is the next to be compiled.
    DCHECK_EQ(instruction->GetNext(), move_for);
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        Move32(location, instruction->GetLocations()->Out());
        break;

      case Primitive::kPrimLong:
        Move64(location, instruction->GetLocations()->Out());
        break;

      default:
        LOG(FATAL) << "Unimplemented type " << instruction->GetType();
    }
  }
}

void LocationsBuilderX86::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  if (GetGraph()->GetExitBlock() == successor) {
    codegen_->GenerateFrameExit();
  } else if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderX86::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitExit(HExit* exit) {
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ int3();
  }
}

void LocationsBuilderX86::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  locations->SetInAt(0, Location::Any());
  if_instr->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
  // TODO: Generate the input as a condition, instead of materializing in a register.
  Location location = if_instr->GetLocations()->InAt(0);
  if (location.IsRegister()) {
    __ cmpl(location.AsX86().AsCpuRegister(), Immediate(0));
  } else {
    __ cmpl(Address(ESP, location.GetStackIndex()), Immediate(0));
  }
  __ j(kEqual, codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfTrueSuccessor())) {
    __ jmp(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
  }
}

void LocationsBuilderX86::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderX86::VisitLoadLocal(HLoadLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
}

void LocationsBuilderX86::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << store->InputAt(1)->GetType();
  }
  store->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitStoreLocal(HStoreLocal* store) {
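  // Nothing to do: the locations builder constrained the stored value to the
  // local's stack slot, so the value is already in place here.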
}

void LocationsBuilderX86::VisitEqual(HEqual* equal) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(equal);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::Any());
  locations->SetOut(Location::SameAsFirstInput());
  equal->SetLocations(locations);
}

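// Materializes the comparison into the output register: its low byte is set
// to 1 when the operands are equal and to 0 otherwise.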
void InstructionCodeGeneratorX86::VisitEqual(HEqual* equal) {
  LocationSummary* locations = equal->GetLocations();
  if (locations->InAt(1).IsRegister()) {
    __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(),
            locations->InAt(1).AsX86().AsCpuRegister());
  } else {
    __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(),
            Address(ESP, locations->InAt(1).GetStackIndex()));
  }
  __ setb(kEqual, locations->Out().AsX86().AsCpuRegister());
}

void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
  // TODO: Support constant locations.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::RequiresRegister());
  constant->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant) {
  codegen_->Move(constant, constant->GetLocations()->Out(), nullptr);
}

void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
  // TODO: Support constant locations.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::RequiresRegister());
  constant->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
}

void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret) {
  codegen_->GenerateFrameExit();
  __ ret();
}

void LocationsBuilderX86::VisitReturn(HReturn* ret) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(0, X86CpuLocation(EAX));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(
          0, Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(EAX_EDX)));
      break;

    default:
      LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
  }
  ret->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86().AsCpuRegister(), EAX);
        break;

      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86().AsRegisterPair(), EAX_EDX);
        break;

      default:
        LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
  __ ret();
}

void LocationsBuilderX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(invoke);
  locations->AddTemp(X86CpuLocation(EAX));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  switch (invoke->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetOut(X86CpuLocation(EAX));
      break;

    case Primitive::kPrimLong:
      locations->SetOut(Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(EAX_EDX)));
      break;

    case Primitive::kPrimVoid:
      break;

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented return type " << invoke->GetType();
      break;
  }

  invoke->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).AsX86().AsCpuRegister();
  size_t index_in_cache = mirror::Array::DataOffset(sizeof(mirror::Object*)).Int32Value() +
      invoke->GetIndexInDexCache() * kX86WordSize;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ movl(temp, Address(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()));
  // temp = temp[index_in_cache]
  __ movl(temp, Address(temp, index_in_cache));
  // (temp + offset_of_quick_compiled_code)()
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value()));

  codegen_->RecordPcInfo(invoke->GetDexPc());
}

void LocationsBuilderX86::VisitAdd(HAdd* add) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(add);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
  add->SetLocations(locations);
}

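// Long addition is done in two steps: addl on the low halves, then adcl on
// the high halves so the carry out of the low word is propagated.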
void InstructionCodeGeneratorX86::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(),
                locations->Out().AsX86().AsCpuRegister());
      if (locations->InAt(1).IsRegister()) {
        __ addl(locations->InAt(0).AsX86().AsCpuRegister(),
                locations->InAt(1).AsX86().AsCpuRegister());
      } else {
        __ addl(locations->InAt(0).AsX86().AsCpuRegister(),
                Address(ESP, locations->InAt(1).GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      DCHECK_EQ(locations->InAt(0).AsX86().AsRegisterPair(),
                locations->Out().AsX86().AsRegisterPair());
      if (locations->InAt(1).IsRegister()) {
        __ addl(locations->InAt(0).AsX86().AsRegisterPairLow(),
                locations->InAt(1).AsX86().AsRegisterPairLow());
        __ adcl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
                locations->InAt(1).AsX86().AsRegisterPairHigh());
      } else {
        __ addl(locations->InAt(0).AsX86().AsRegisterPairLow(),
                Address(ESP, locations->InAt(1).GetStackIndex()));
        __ adcl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
                Address(ESP, locations->InAt(1).GetHighStackIndex(kX86WordSize)));
      }
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
}

void LocationsBuilderX86::VisitSub(HSub* sub) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(sub);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
  }
  sub->SetLocations(locations);
}

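// Mirrors VisitAdd: subl on the low halves, then sbbl on the high halves so
// the borrow is propagated.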
void InstructionCodeGeneratorX86::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(),
                locations->Out().AsX86().AsCpuRegister());
      if (locations->InAt(1).IsRegister()) {
        __ subl(locations->InAt(0).AsX86().AsCpuRegister(),
                locations->InAt(1).AsX86().AsCpuRegister());
      } else {
        __ subl(locations->InAt(0).AsX86().AsCpuRegister(),
                Address(ESP, locations->InAt(1).GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      DCHECK_EQ(locations->InAt(0).AsX86().AsRegisterPair(),
                locations->Out().AsX86().AsRegisterPair());
      if (locations->InAt(1).IsRegister()) {
        __ subl(locations->InAt(0).AsX86().AsRegisterPairLow(),
                locations->InAt(1).AsX86().AsRegisterPairLow());
        __ sbbl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
                locations->InAt(1).AsX86().AsRegisterPairHigh());
      } else {
        __ subl(locations->InAt(0).AsX86().AsRegisterPairLow(),
                Address(ESP, locations->InAt(1).GetStackIndex()));
        __ sbbl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
                Address(ESP, locations->InAt(1).GetHighStackIndex(kX86WordSize)));
      }
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
  }
}

void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetOut(X86CpuLocation(EAX));
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(X86CpuLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(X86CpuLocation(calling_convention.GetRegisterAt(1)));
  instruction->SetLocations(locations);
}

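// Allocation goes through the runtime. On x86 the current Thread* is reached
// through the fs segment, so the quick entrypoint is called at an
// fs-relative absolute address.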
void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));

  __ fs()->call(
      Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocObjectWithAccessCheck)));

  codegen_->RecordPcInfo(instruction->GetDexPc());
}

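// The locations handed back by the calling convention are relative to the
// caller's frame; stack slots are therefore rebased by this method's frame
// size.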
void LocationsBuilderX86::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitParameterValue(HParameterValue* instruction) {
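  // Nothing to do, the parameter is already at its location.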
}

void LocationsBuilderX86::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  instruction->SetLocations(locations);
}

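// The xor with 1 flips the low bit (0 <-> 1), i.e. a boolean negation, which
// is what HNot denotes at this stage.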
void InstructionCodeGeneratorX86::VisitNot(HNot* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location out = locations->Out();
  DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(), out.AsX86().AsCpuRegister());
  __ xorl(out.AsX86().AsCpuRegister(), Immediate(1));
}

void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction) {
  LOG(FATAL) << "Unreachable";
}

void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorX86::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

X86Assembler* ParallelMoveResolverX86::GetAssembler() const {
  return codegen_->GetAssembler();
}

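// The scratch-register scopes below may have to spill a register; when they
// do, SpillScratch pushes it, moving ESP down one word, so every
// ESP-relative offset is compensated with stack_offset while the scope is
// live.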
void ParallelMoveResolverX86::MoveMemoryToMemory(int dst, int src) {
  ScratchRegisterScope ensure_scratch(
      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, src + stack_offset));
  __ movl(Address(ESP, dst + stack_offset), static_cast<Register>(ensure_scratch.GetRegister()));
}

void ParallelMoveResolverX86::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ movl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
    } else {
      DCHECK(destination.IsStackSlot());
      __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsCpuRegister());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ movl(destination.AsX86().AsCpuRegister(), Address(ESP, source.GetStackIndex()));
    } else {
      DCHECK(destination.IsStackSlot());
      MoveMemoryToMemory(destination.GetStackIndex(), source.GetStackIndex());
    }
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}

void ParallelMoveResolverX86::Exchange(Register reg, int mem) {
  Register suggested_scratch = reg == EAX ? EBX : EAX;
  ScratchRegisterScope ensure_scratch(
      this, reg, suggested_scratch, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset));
  __ movl(Address(ESP, mem + stack_offset), reg);
  __ movl(reg, static_cast<Register>(ensure_scratch.GetRegister()));
}

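// Swapping two stack slots needs two scratch registers; the second scope
// deliberately suggests a register different from the one the first scope
// obtained.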
void ParallelMoveResolverX86::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch1(
      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());

  Register suggested_scratch = ensure_scratch1.GetRegister() == EAX ? EBX : EAX;
  ScratchRegisterScope ensure_scratch2(
      this, ensure_scratch1.GetRegister(), suggested_scratch, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch1.IsSpilled() ? kX86WordSize : 0;
  stack_offset += ensure_scratch2.IsSpilled() ? kX86WordSize : 0;
  __ movl(static_cast<Register>(ensure_scratch1.GetRegister()), Address(ESP, mem1 + stack_offset));
  __ movl(static_cast<Register>(ensure_scratch2.GetRegister()), Address(ESP, mem2 + stack_offset));
  __ movl(Address(ESP, mem2 + stack_offset), static_cast<Register>(ensure_scratch1.GetRegister()));
  __ movl(Address(ESP, mem1 + stack_offset), static_cast<Register>(ensure_scratch2.GetRegister()));
}

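// Register-register swaps use xchgl directly; swaps involving memory go
// through the Exchange helpers above.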
void ParallelMoveResolverX86::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    __ xchgl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange(source.AsX86().AsCpuRegister(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange(destination.AsX86().AsCpuRegister(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange(destination.GetStackIndex(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}

void ParallelMoveResolverX86::SpillScratch(int reg) {
  __ pushl(static_cast<Register>(reg));
}

void ParallelMoveResolverX86::RestoreScratch(int reg) {
  __ popl(static_cast<Register>(reg));
}

}  // namespace x86
}  // namespace art