code_generator_x86.cc revision 96f89a290eb67d7bf4b1636798fa28df14309cc7
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86.h"
18#include "utils/assembler.h"
19#include "utils/x86/assembler_x86.h"
20#include "utils/x86/managed_register_x86.h"
21
22#include "entrypoints/quick/quick_entrypoints.h"
23#include "mirror/array.h"
24#include "mirror/art_method.h"
25#include "thread.h"
26
27namespace art {
28
29x86::X86ManagedRegister Location::AsX86() const {
30  return reg().AsX86();
31}
32
33namespace x86 {
34
35#define __ reinterpret_cast<X86Assembler*>(codegen->GetAssembler())->
36
// Slow path taken when a null check fails: calls the quick runtime
// entrypoint that throws a NullPointerException.
class NullCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86(uint32_t dex_pc) : dex_pc_(dex_pc) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    // Call pThrowNullPointer through the thread-local (fs-relative) entrypoint table.
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowNullPointer)));
    // Associate the call site with the dex pc of the faulting instruction.
    codegen->RecordPcInfo(dex_pc_);
  }

 private:
  const uint32_t dex_pc_;  // Dex pc of the instruction requiring the null check.
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86);
};
51
52#undef __
53#define __ reinterpret_cast<X86Assembler*>(GetAssembler())->
54
55inline Condition X86Condition(IfCondition cond) {
56  switch (cond) {
57    case kCondEQ: return kEqual;
58    case kCondNE: return kNotEqual;
59    case kCondLT: return kLess;
60    case kCondLE: return kLessEqual;
61    case kCondGT: return kGreater;
62    case kCondGE: return kGreaterEqual;
63    default:
64      LOG(FATAL) << "Unknown if condition";
65  }
66  return kEqual;
67}
68
// Only one word is already on the stack at frame entry: the return PC
// (see GenerateFrameEntry, which subtracts it from the ESP adjustment).
static constexpr int kNumberOfPushedRegistersAtEntry = 1;
// The current method is spilled at the bottom of the frame
// (stored in GenerateFrameEntry, reloaded by LoadCurrentMethod).
static constexpr int kCurrentMethodStackOffset = 0;
71
72void CodeGeneratorX86::DumpCoreRegister(std::ostream& stream, int reg) const {
73  stream << X86ManagedRegister::FromCpuRegister(Register(reg));
74}
75
76void CodeGeneratorX86::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
77  stream << X86ManagedRegister::FromXmmRegister(XmmRegister(reg));
78}
79
// Builds the x86 code generator and its helper visitors: the location
// builder, the instruction code generator, and the parallel move resolver.
CodeGeneratorX86::CodeGeneratorX86(HGraph* graph)
    : CodeGenerator(graph, kNumberOfRegIds),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this) {}
85
// Bytes already consumed at the top of the frame by the prologue pushes
// (currently only the return PC word).
size_t CodeGeneratorX86::FrameEntrySpillSize() const {
  return kNumberOfPushedRegistersAtEntry * kX86WordSize;
}
89
90static bool* GetBlockedRegisterPairs(bool* blocked_registers) {
91  return blocked_registers + kNumberOfAllocIds;
92}
93
94ManagedRegister CodeGeneratorX86::AllocateFreeRegister(Primitive::Type type,
95                                                       bool* blocked_registers) const {
96  switch (type) {
97    case Primitive::kPrimLong: {
98      bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);
99      size_t reg = AllocateFreeRegisterInternal(blocked_register_pairs, kNumberOfRegisterPairs);
100      X86ManagedRegister pair =
101          X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
102      blocked_registers[pair.AsRegisterPairLow()] = true;
103      blocked_registers[pair.AsRegisterPairHigh()] = true;
104      // Block all other register pairs that share a register with `pair`.
105      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
106        X86ManagedRegister current =
107            X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
108        if (current.AsRegisterPairLow() == pair.AsRegisterPairLow()
109            || current.AsRegisterPairLow() == pair.AsRegisterPairHigh()
110            || current.AsRegisterPairHigh() == pair.AsRegisterPairLow()
111            || current.AsRegisterPairHigh() == pair.AsRegisterPairHigh()) {
112          blocked_register_pairs[i] = true;
113        }
114      }
115      return pair;
116    }
117
118    case Primitive::kPrimByte:
119    case Primitive::kPrimBoolean:
120    case Primitive::kPrimChar:
121    case Primitive::kPrimShort:
122    case Primitive::kPrimInt:
123    case Primitive::kPrimNot: {
124      Register reg = static_cast<Register>(
125          AllocateFreeRegisterInternal(blocked_registers, kNumberOfCpuRegisters));
126      // Block all register pairs that contain `reg`.
127      bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);
128      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
129        X86ManagedRegister current =
130            X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
131        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
132          blocked_register_pairs[i] = true;
133        }
134      }
135      return X86ManagedRegister::FromCpuRegister(reg);
136    }
137
138    case Primitive::kPrimFloat:
139    case Primitive::kPrimDouble:
140      LOG(FATAL) << "Unimplemented register type " << type;
141
142    case Primitive::kPrimVoid:
143      LOG(FATAL) << "Unreachable type " << type;
144  }
145
146  return ManagedRegister::NoRegister();
147}
148
149void CodeGeneratorX86::SetupBlockedRegisters(bool* blocked_registers) const {
150  bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);
151
152  // Don't allocate the dalvik style register pair passing.
153  blocked_register_pairs[ECX_EDX] = true;
154
155  // Stack register is always reserved.
156  blocked_registers[ESP] = true;
157
158  // TODO: We currently don't use Quick's callee saved registers.
159  blocked_registers[EBP] = true;
160  blocked_registers[ESI] = true;
161  blocked_registers[EDI] = true;
162  blocked_register_pairs[EAX_EDI] = true;
163  blocked_register_pairs[EDX_EDI] = true;
164  blocked_register_pairs[ECX_EDI] = true;
165  blocked_register_pairs[EBX_EDI] = true;
166}
167
// Total number of register allocation ids (singles + pairs) on x86.
size_t CodeGeneratorX86::GetNumberOfRegisters() const {
  return kNumberOfRegIds;
}
171
172static Location X86CpuLocation(Register reg) {
173  return Location::RegisterLocation(X86ManagedRegister::FromCpuRegister(reg));
174}
175
// The instruction visitor that emits native code; caches the assembler
// owned by `codegen` for use by the `__` macro.
InstructionCodeGeneratorX86::InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
180
// Emits the method prologue: allocates the stack frame and spills the
// current method into its reserved slot.
void CodeGeneratorX86::GenerateFrameEntry() {
  // Create a fake register to mimic Quick.
  static const int kFakeReturnRegister = 8;
  core_spill_mask_ |= (1 << kFakeReturnRegister);

  // The return PC has already been pushed on the stack.
  // Allocate the remainder of the frame in a single ESP adjustment.
  __ subl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
  // Store EAX into the current-method slot (reloaded by LoadCurrentMethod).
  __ movl(Address(ESP, kCurrentMethodStackOffset), EAX);
}
190
// Emits the method epilogue: releases the frame allocated in
// GenerateFrameEntry (the return PC word stays for the `ret`).
void CodeGeneratorX86::GenerateFrameExit() {
  __ addl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
}
194
// Binds `label` to the current assembler position.
void CodeGeneratorX86::Bind(Label* label) {
  __ Bind(label);
}
198
// Reloads the current method from the slot it was spilled to in
// GenerateFrameEntry.
void InstructionCodeGeneratorX86::LoadCurrentMethod(Register reg) {
  __ movl(reg, Address(ESP, kCurrentMethodStackOffset));
}
202
203Location CodeGeneratorX86::GetStackLocation(HLoadLocal* load) const {
204  switch (load->GetType()) {
205    case Primitive::kPrimLong:
206      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
207      break;
208
209    case Primitive::kPrimInt:
210    case Primitive::kPrimNot:
211      return Location::StackSlot(GetStackSlot(load->GetLocal()));
212
213    case Primitive::kPrimFloat:
214    case Primitive::kPrimDouble:
215      LOG(FATAL) << "Unimplemented type " << load->GetType();
216
217    case Primitive::kPrimBoolean:
218    case Primitive::kPrimByte:
219    case Primitive::kPrimChar:
220    case Primitive::kPrimShort:
221    case Primitive::kPrimVoid:
222      LOG(FATAL) << "Unexpected type " << load->GetType();
223  }
224
225  LOG(FATAL) << "Unreachable";
226  return Location();
227}
228
// Core registers used to pass arguments to runtime entrypoints.
static constexpr Register kRuntimeParameterCoreRegisters[] = { EAX, ECX, EDX };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
232
// Calling convention used when invoking runtime entrypoints: arguments go
// in EAX, ECX, EDX (see kRuntimeParameterCoreRegisters above).
class InvokeRuntimeCallingConvention : public CallingConvention<Register> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
242
// Returns the location of the next argument of type `type` under the dex
// calling convention, consuming register/stack units from `gp_index_`.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // 32-bit values take one register while any remain, else a stack slot.
      uint32_t index = gp_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return X86CpuLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimLong: {
      // 64-bit values consume two consecutive 32-bit units.
      uint32_t index = gp_index_;
      gp_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        // Both halves fit in a register pair.
        return Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(
            calling_convention.GetRegisterPairAt(index)));
      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
        // Split case: low half in the last register, high half on the stack
        // (handled as a "quick parameter", see Move64).
        return Location::QuickParameter(index);
      } else {
        // Both halves on the stack.
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented parameter type " << type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
283
284void CodeGeneratorX86::Move32(Location destination, Location source) {
285  if (source.Equals(destination)) {
286    return;
287  }
288  if (destination.IsRegister()) {
289    if (source.IsRegister()) {
290      __ movl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
291    } else {
292      DCHECK(source.IsStackSlot());
293      __ movl(destination.AsX86().AsCpuRegister(), Address(ESP, source.GetStackIndex()));
294    }
295  } else {
296    if (source.IsRegister()) {
297      __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsCpuRegister());
298    } else {
299      DCHECK(source.IsStackSlot());
300      __ pushl(Address(ESP, source.GetStackIndex()));
301      __ popl(Address(ESP, destination.GetStackIndex()));
302    }
303  }
304}
305
// Emits a 64-bit move between the supported 64-bit location kinds:
// register pairs, "quick parameter" (low half in an argument register,
// high half in the caller's frame) and double stack slots.
void CodeGeneratorX86::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movl(destination.AsX86().AsRegisterPairLow(), source.AsX86().AsRegisterPairLow());
      __ movl(destination.AsX86().AsRegisterPairHigh(), source.AsX86().AsRegisterPairHigh());
    } else if (source.IsQuickParameter()) {
      uint32_t argument_index = source.GetQuickParameterIndex();
      InvokeDexCallingConvention calling_convention;
      // Low half comes from an argument register; the high half lives in the
      // caller's frame, hence the GetFrameSize() adjustment.
      __ movl(destination.AsX86().AsRegisterPairLow(),
              calling_convention.GetRegisterAt(argument_index));
      __ movl(destination.AsX86().AsRegisterPairHigh(), Address(ESP,
          calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movl(destination.AsX86().AsRegisterPairLow(), Address(ESP, source.GetStackIndex()));
      __ movl(destination.AsX86().AsRegisterPairHigh(),
              Address(ESP, source.GetHighStackIndex(kX86WordSize)));
    }
  } else if (destination.IsQuickParameter()) {
    InvokeDexCallingConvention calling_convention;
    uint32_t argument_index = destination.GetQuickParameterIndex();
    if (source.IsRegister()) {
      __ movl(calling_convention.GetRegisterAt(argument_index), source.AsX86().AsRegisterPairLow());
      __ movl(Address(ESP, calling_convention.GetStackOffsetOf(argument_index + 1)),
              source.AsX86().AsRegisterPairHigh());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movl(calling_convention.GetRegisterAt(argument_index),
              Address(ESP, source.GetStackIndex()));
      // Memory-to-memory move of the high half via push/pop.
      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
      __ popl(Address(ESP, calling_convention.GetStackOffsetOf(argument_index + 1)));
    }
  } else {
    // Destination is a double stack slot.
    if (source.IsRegister()) {
      __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsRegisterPairLow());
      __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
              source.AsX86().AsRegisterPairHigh());
    } else if (source.IsQuickParameter()) {
      InvokeDexCallingConvention calling_convention;
      uint32_t argument_index = source.GetQuickParameterIndex();
      __ movl(Address(ESP, destination.GetStackIndex()),
              calling_convention.GetRegisterAt(argument_index));
      // High half: caller's frame slot -> this frame's slot via push/pop.
      __ pushl(Address(ESP,
          calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize()));
      __ popl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Both halves moved through the stack with push/pop pairs.
      __ pushl(Address(ESP, source.GetStackIndex()));
      __ popl(Address(ESP, destination.GetStackIndex()));
      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
      __ popl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)));
    }
  }
}
363
// Moves the value produced by `instruction` into `location`: constants are
// materialized as immediates, locals are copied from their stack slot, and
// any other instruction's value is taken from its output location.
void CodeGeneratorX86::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  if (instruction->AsIntConstant() != nullptr) {
    Immediate imm(instruction->AsIntConstant()->GetValue());
    if (location.IsRegister()) {
      __ movl(location.AsX86().AsCpuRegister(), imm);
    } else {
      __ movl(Address(ESP, location.GetStackIndex()), imm);
    }
  } else if (instruction->AsLongConstant() != nullptr) {
    // 64-bit constants are written as two 32-bit immediates.
    int64_t value = instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      __ movl(location.AsX86().AsRegisterPairLow(), Immediate(Low32Bits(value)));
      __ movl(location.AsX86().AsRegisterPairHigh(), Immediate(High32Bits(value)));
    } else {
      __ movl(Address(ESP, location.GetStackIndex()), Immediate(Low32Bits(value)));
      __ movl(Address(ESP, location.GetHighStackIndex(kX86WordSize)), Immediate(High32Bits(value)));
    }
  } else if (instruction->AsLoadLocal() != nullptr) {
    // Locals always live in their stack slot.
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        Move32(location, Location::StackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      case Primitive::kPrimLong:
        Move64(location, Location::DoubleStackSlot(
            GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      default:
        LOG(FATAL) << "Unimplemented local type " << instruction->GetType();
    }
  } else {
    // General case: the instruction must immediately precede the move
    // (possibly through temporaries), so its output location is still live.
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        Move32(location, instruction->GetLocations()->Out());
        break;

      case Primitive::kPrimLong:
        Move64(location, instruction->GetLocations()->Out());
        break;

      default:
        LOG(FATAL) << "Unimplemented type " << instruction->GetType();
    }
  }
}
421
// Unconditional branches need no operand locations.
void LocationsBuilderX86::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
425
426void InstructionCodeGeneratorX86::VisitGoto(HGoto* got) {
427  HBasicBlock* successor = got->GetSuccessor();
428  if (GetGraph()->GetExitBlock() == successor) {
429    codegen_->GenerateFrameExit();
430  } else if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
431    __ jmp(codegen_->GetLabelOf(successor));
432  }
433}
434
// The exit block needs no operand locations.
void LocationsBuilderX86::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
438
// The exit block should never be reached at runtime; in debug builds,
// trap if it is.
void InstructionCodeGeneratorX86::VisitExit(HExit* exit) {
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ int3();
  }
}
445
// An HIf only needs an input location when its condition was materialized
// as a boolean; otherwise the compare is emitted inline by VisitIf.
void LocationsBuilderX86::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  HCondition* condition = cond->AsCondition();
  if (condition->NeedsMaterialization()) {
    locations->SetInAt(0, Location::Any());
  }
  if_instr->SetLocations(locations);
}
456
457void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
458  HInstruction* cond = if_instr->InputAt(0);
459  DCHECK(cond->IsCondition());
460  HCondition* condition = cond->AsCondition();
461  if (condition->NeedsMaterialization()) {
462    // Materialized condition, compare against 0
463    Location lhs = if_instr->GetLocations()->InAt(0);
464    if (lhs.IsRegister()) {
465      __ cmpl(lhs.AsX86().AsCpuRegister(), Immediate(0));
466    } else {
467      __ cmpl(Address(ESP, lhs.GetStackIndex()), Immediate(0));
468    }
469    __ j(kEqual,  codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
470  } else {
471    Location lhs = condition->GetLocations()->InAt(0);
472    Location rhs = condition->GetLocations()->InAt(1);
473    // LHS is guaranteed to be in a register (see LocationsBuilderX86::VisitCondition).
474    if (rhs.IsRegister()) {
475      __ cmpl(lhs.AsX86().AsCpuRegister(), rhs.AsX86().AsCpuRegister());
476    } else if (rhs.IsConstant()) {
477      HIntConstant* instruction = rhs.GetConstant()->AsIntConstant();
478      Immediate imm(instruction->AsIntConstant()->GetValue());
479      __ cmpl(lhs.AsX86().AsCpuRegister(), imm);
480    } else {
481      __ cmpl(lhs.AsX86().AsCpuRegister(), Address(ESP, rhs.GetStackIndex()));
482    }
483    __ j(X86Condition(condition->GetCondition()),
484         codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
485  }
486  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
487    __ jmp(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
488  }
489}
490
// Locals carry no operands of their own.
void LocationsBuilderX86::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}
494
// No code to emit; locals live in the entry block only.
void InstructionCodeGeneratorX86::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}
498
// Loads from locals are resolved at use site (see GetStackLocation).
void LocationsBuilderX86::VisitLoadLocal(HLoadLocal* local) {
  local->SetLocations(nullptr);
}
502
void InstructionCodeGeneratorX86::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
}
506
// A store to a local pins its value input (input 1) directly to the
// local's stack slot, so no move is needed at code generation time.
void LocationsBuilderX86::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << store->InputAt(1)->GetType();
  }
  store->SetLocations(locations);
}
528
// Nothing to emit: the input was constrained to the local's stack slot
// by the locations builder.
void InstructionCodeGeneratorX86::VisitStoreLocal(HStoreLocal* store) {
}
531
// The first input must be in a register; the second may be anywhere
// (register, constant or stack slot). An output register is only needed
// when the boolean result is materialized.
void LocationsBuilderX86::VisitCondition(HCondition* comp) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(comp);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::Any());
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister());
  }
  comp->SetLocations(locations);
}
541
542void InstructionCodeGeneratorX86::VisitCondition(HCondition* comp) {
543  if (comp->NeedsMaterialization()) {
544    LocationSummary* locations = comp->GetLocations();
545    if (locations->InAt(1).IsRegister()) {
546      __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(),
547              locations->InAt(1).AsX86().AsCpuRegister());
548    } else if (locations->InAt(1).IsConstant()) {
549      HConstant* instruction = locations->InAt(1).GetConstant();
550      Immediate imm(instruction->AsIntConstant()->GetValue());
551      __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(), imm);
552    } else {
553      __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(),
554              Address(ESP, locations->InAt(1).GetStackIndex()));
555    }
556    __ setb(X86Condition(comp->GetCondition()), locations->Out().AsX86().AsCpuRegister());
557  }
558}
559
// Shares the generic HCondition handling.
void LocationsBuilderX86::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
563
// Shares the generic HCondition handling.
void InstructionCodeGeneratorX86::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
567
// Shares the generic HCondition handling.
void LocationsBuilderX86::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
571
// Shares the generic HCondition handling.
void InstructionCodeGeneratorX86::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
575
// Shares the generic HCondition handling.
void LocationsBuilderX86::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
579
// Shares the generic HCondition handling.
void InstructionCodeGeneratorX86::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
583
// Shares the generic HCondition handling.
void LocationsBuilderX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
587
// Shares the generic HCondition handling.
void InstructionCodeGeneratorX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
591
// Shares the generic HCondition handling.
void LocationsBuilderX86::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
595
// Shares the generic HCondition handling.
void InstructionCodeGeneratorX86::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
599
// Shares the generic HCondition handling.
void LocationsBuilderX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
603
// Shares the generic HCondition handling.
void InstructionCodeGeneratorX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
607
// Constants are kept as constant locations and materialized at use site.
void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
  constant->SetLocations(locations);
}
613
// Will be generated at use site (see CodeGeneratorX86::Move).
void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant) {
}
616
// Constants are kept as constant locations and materialized at use site.
void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
  constant->SetLocations(locations);
}
622
void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
}
626
// A void return has no operands.
void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
630
// Tears down the frame and returns to the caller.
void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret) {
  codegen_->GenerateFrameExit();
  __ ret();
}
635
// Pins the return value to the ABI return registers: EAX for 32-bit
// values and references, EAX:EDX for longs.
void LocationsBuilderX86::VisitReturn(HReturn* ret) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(0, X86CpuLocation(EAX));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(
          0, Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(EAX_EDX)));
      break;

    default:
      LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
  }
  ret->SetLocations(locations);
}
658
// Verifies (debug builds only) that the return value already sits in the
// ABI registers constrained by the locations builder, then tears down the
// frame and returns.
void InstructionCodeGeneratorX86::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86().AsCpuRegister(), EAX);
        break;

      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86().AsRegisterPair(), EAX_EDX);
        break;

      default:
        LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
  __ ret();
}
682
// Sets up argument locations following the dex calling convention, a
// temporary (EAX) for the method pointer, and the ABI return location.
void LocationsBuilderX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(invoke);
  // EAX holds the resolved method during the call sequence.
  locations->AddTemp(X86CpuLocation(EAX));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  switch (invoke->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetOut(X86CpuLocation(EAX));
      break;

    case Primitive::kPrimLong:
      locations->SetOut(Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(EAX_EDX)));
      break;

    case Primitive::kPrimVoid:
      break;

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented return type " << invoke->GetType();
      break;
  }

  invoke->SetLocations(locations);
}
718
// Emits the call sequence: resolve the callee through the current
// method's dex cache, then call its quick-compiled entry point.
void InstructionCodeGeneratorX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  // EAX was reserved as a temporary by the locations builder.
  Register temp = invoke->GetLocations()->GetTemp(0).AsX86().AsCpuRegister();
  uint32_t heap_reference_size = sizeof(mirror::HeapReference<mirror::Object>);
  // Byte offset of the resolved method within the dex cache array.
  size_t index_in_cache = mirror::Array::DataOffset(heap_reference_size).Int32Value() +
      invoke->GetIndexInDexCache() * kX86WordSize;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ movl(temp, Address(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()));
  // temp = temp[index_in_cache]
  __ movl(temp, Address(temp, index_in_cache));
  // (temp + offset_of_quick_compiled_code)()
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value()));

  // Record the call site so a stack map exists for this dex pc.
  codegen_->RecordPcInfo(invoke->GetDexPc());
}
743
// x86 add is two-address: the first input must be a register and the
// output reuses it; the second input may be anywhere.
void LocationsBuilderX86::VisitAdd(HAdd* add) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(add);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
  add->SetLocations(locations);
}
767
768void InstructionCodeGeneratorX86::VisitAdd(HAdd* add) {
769  LocationSummary* locations = add->GetLocations();
770  switch (add->GetResultType()) {
771    case Primitive::kPrimInt: {
772      DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(),
773                locations->Out().AsX86().AsCpuRegister());
774      if (locations->InAt(1).IsRegister()) {
775        __ addl(locations->InAt(0).AsX86().AsCpuRegister(),
776                locations->InAt(1).AsX86().AsCpuRegister());
777      } else if (locations->InAt(1).IsConstant()) {
778        HConstant* instruction = locations->InAt(1).GetConstant();
779        Immediate imm(instruction->AsIntConstant()->GetValue());
780        __ addl(locations->InAt(0).AsX86().AsCpuRegister(), imm);
781      } else {
782        __ addl(locations->InAt(0).AsX86().AsCpuRegister(),
783                Address(ESP, locations->InAt(1).GetStackIndex()));
784      }
785      break;
786    }
787
788    case Primitive::kPrimLong: {
789      DCHECK_EQ(locations->InAt(0).AsX86().AsRegisterPair(),
790                locations->Out().AsX86().AsRegisterPair());
791      if (locations->InAt(1).IsRegister()) {
792        __ addl(locations->InAt(0).AsX86().AsRegisterPairLow(),
793                locations->InAt(1).AsX86().AsRegisterPairLow());
794        __ adcl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
795                locations->InAt(1).AsX86().AsRegisterPairHigh());
796      } else {
797        __ addl(locations->InAt(0).AsX86().AsRegisterPairLow(),
798                Address(ESP, locations->InAt(1).GetStackIndex()));
799        __ adcl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
800                Address(ESP, locations->InAt(1).GetHighStackIndex(kX86WordSize)));
801      }
802      break;
803    }
804
805    case Primitive::kPrimBoolean:
806    case Primitive::kPrimByte:
807    case Primitive::kPrimChar:
808    case Primitive::kPrimShort:
809      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
810      break;
811
812    default:
813      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
814  }
815}
816
// x86 sub is two-address: the first input must be a register and the
// output reuses it; the second input may be anywhere.
void LocationsBuilderX86::VisitSub(HSub* sub) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(sub);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
  }
  sub->SetLocations(locations);
}
840
841void InstructionCodeGeneratorX86::VisitSub(HSub* sub) {
842  LocationSummary* locations = sub->GetLocations();
843  switch (sub->GetResultType()) {
844    case Primitive::kPrimInt: {
845      DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(),
846                locations->Out().AsX86().AsCpuRegister());
847      if (locations->InAt(1).IsRegister()) {
848        __ subl(locations->InAt(0).AsX86().AsCpuRegister(),
849                locations->InAt(1).AsX86().AsCpuRegister());
850      } else if (locations->InAt(1).IsConstant()) {
851        HConstant* instruction = locations->InAt(1).GetConstant();
852        Immediate imm(instruction->AsIntConstant()->GetValue());
853        __ subl(locations->InAt(0).AsX86().AsCpuRegister(), imm);
854      } else {
855        __ subl(locations->InAt(0).AsX86().AsCpuRegister(),
856                Address(ESP, locations->InAt(1).GetStackIndex()));
857      }
858      break;
859    }
860
861    case Primitive::kPrimLong: {
862      DCHECK_EQ(locations->InAt(0).AsX86().AsRegisterPair(),
863                locations->Out().AsX86().AsRegisterPair());
864      if (locations->InAt(1).IsRegister()) {
865        __ subl(locations->InAt(0).AsX86().AsRegisterPairLow(),
866                locations->InAt(1).AsX86().AsRegisterPairLow());
867        __ sbbl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
868                locations->InAt(1).AsX86().AsRegisterPairHigh());
869      } else {
870        __ subl(locations->InAt(0).AsX86().AsRegisterPairLow(),
871                Address(ESP, locations->InAt(1).GetStackIndex()));
872        __ sbbl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
873                Address(ESP, locations->InAt(1).GetHighStackIndex(kX86WordSize)));
874      }
875      break;
876    }
877
878    case Primitive::kPrimBoolean:
879    case Primitive::kPrimByte:
880    case Primitive::kPrimChar:
881    case Primitive::kPrimShort:
882      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
883      break;
884
885    default:
886      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
887  }
888}
889
890void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
891  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
892  locations->SetOut(X86CpuLocation(EAX));
893  InvokeRuntimeCallingConvention calling_convention;
894  locations->AddTemp(X86CpuLocation(calling_convention.GetRegisterAt(0)));
895  locations->AddTemp(X86CpuLocation(calling_convention.GetRegisterAt(1)));
896  instruction->SetLocations(locations);
897}
898
void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
  // Calls the runtime allocation entrypoint. Arguments follow the runtime
  // calling convention: register 0 holds the type index of the class to
  // instantiate, register 1 holds the current ArtMethod*.
  InvokeRuntimeCallingConvention calling_convention;
  LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));

  // Indirect call through the thread-local (fs-relative) entrypoint table;
  // this entrypoint also performs access checks.
  __ fs()->call(
      Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocObjectWithAccessCheck)));

  // Record the dex pc at the call site so a stack map can be built if the
  // runtime needs to walk or deopt this frame.
  codegen_->RecordPcInfo(instruction->GetDexPc());
}
909
void LocationsBuilderX86::VisitParameterValue(HParameterValue* instruction) {
  // Publishes the location where this incoming parameter already lives.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  // Stack-passed parameters sit in the caller's frame: the visitor returns
  // offsets relative to the incoming SP, so rebase them past this method's
  // own frame.
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
  instruction->SetLocations(locations);
}
921
void InstructionCodeGeneratorX86::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to emit: the parameter is already at the location the builder
  // published (register or caller-frame stack slot).
}
924
925void LocationsBuilderX86::VisitNot(HNot* instruction) {
926  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
927  locations->SetInAt(0, Location::RequiresRegister());
928  locations->SetOut(Location::SameAsFirstInput());
929  instruction->SetLocations(locations);
930}
931
932void InstructionCodeGeneratorX86::VisitNot(HNot* instruction) {
933  LocationSummary* locations = instruction->GetLocations();
934  Location out = locations->Out();
935  DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(), out.AsX86().AsCpuRegister());
936  __ xorl(out.AsX86().AsCpuRegister(), Immediate(1));
937}
938
939void LocationsBuilderX86::VisitCompare(HCompare* compare) {
940  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare);
941  locations->SetInAt(0, Location::RequiresRegister());
942  locations->SetInAt(1, Location::Any());
943  locations->SetOut(Location::RequiresRegister());
944  compare->SetLocations(locations);
945}
946
947void InstructionCodeGeneratorX86::VisitCompare(HCompare* compare) {
948  Label greater, done;
949  LocationSummary* locations = compare->GetLocations();
950  switch (compare->InputAt(0)->GetType()) {
951    case Primitive::kPrimLong: {
952      Label less, greater, done;
953      Register output = locations->Out().AsX86().AsCpuRegister();
954      X86ManagedRegister left = locations->InAt(0).AsX86();
955      Location right = locations->InAt(1);
956      if (right.IsRegister()) {
957        __ cmpl(left.AsRegisterPairHigh(), right.AsX86().AsRegisterPairHigh());
958      } else {
959        DCHECK(right.IsDoubleStackSlot());
960        __ cmpl(left.AsRegisterPairHigh(), Address(ESP, right.GetHighStackIndex(kX86WordSize)));
961      }
962      __ j(kLess, &less);  // Signed compare.
963      __ j(kGreater, &greater);  // Signed compare.
964      if (right.IsRegister()) {
965        __ cmpl(left.AsRegisterPairLow(), right.AsX86().AsRegisterPairLow());
966      } else {
967        DCHECK(right.IsDoubleStackSlot());
968        __ cmpl(left.AsRegisterPairLow(), Address(ESP, right.GetStackIndex()));
969      }
970      __ movl(output, Immediate(0));
971      __ j(kEqual, &done);
972      __ j(kBelow, &less);  // Unsigned compare.
973
974      __ Bind(&greater);
975      __ movl(output, Immediate(1));
976      __ jmp(&done);
977
978      __ Bind(&less);
979      __ movl(output, Immediate(-1));
980
981      __ Bind(&done);
982      break;
983    }
984    default:
985      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
986  }
987}
988
989void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
990  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
991  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
992    locations->SetInAt(i, Location::Any());
993  }
994  locations->SetOut(Location::Any());
995  instruction->SetLocations(locations);
996}
997
void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction) {
  // Phis are expected to be resolved (into moves) before code generation;
  // reaching this visitor indicates a compiler bug.
  LOG(FATAL) << "Unreachable";
}
1001
1002void LocationsBuilderX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
1003  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1004  locations->SetInAt(0, Location::RequiresRegister());
1005  Primitive::Type field_type = instruction->InputAt(1)->GetType();
1006  if (field_type == Primitive::kPrimBoolean || field_type == Primitive::kPrimByte) {
1007    // Ensure the value is in a byte register.
1008    locations->SetInAt(1, X86CpuLocation(EAX));
1009  } else {
1010    locations->SetInAt(1, Location::RequiresRegister());
1011  }
1012  instruction->SetLocations(locations);
1013}
1014
void InstructionCodeGeneratorX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Stores input 1 into the field at `offset` of the object in input 0,
  // with a store width matching the field type.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsX86().AsCpuRegister();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
  Primitive::Type field_type = instruction->InputAt(1)->GetType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      // 8-bit store; the value was constrained to a byte-addressable
      // register by the locations builder.
      ByteRegister value = locations->InAt(1).AsX86().AsByteRegister();
      __ movb(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      // 16-bit store.
      Register value = locations->InAt(1).AsX86().AsCpuRegister();
      __ movw(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // 32-bit store; references are 32-bit here too.
      Register value = locations->InAt(1).AsX86().AsCpuRegister();
      __ movl(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit store as two 32-bit moves (low word, then high word).
      // NOTE: not atomic — volatile long stores are not handled here.
      X86ManagedRegister value = locations->InAt(1).AsX86();
      __ movl(Address(obj, offset), value.AsRegisterPairLow());
      __ movl(Address(obj, kX86WordSize + offset), value.AsRegisterPairHigh());
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      // No break needed: LOG(FATAL) aborts, so no fall-through at runtime.
      LOG(FATAL) << "Unimplemented register type " << field_type;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
  }
}
1058
1059void LocationsBuilderX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
1060  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1061  locations->SetInAt(0, Location::RequiresRegister());
1062  locations->SetOut(Location::RequiresRegister());
1063  instruction->SetLocations(locations);
1064}
1065
void InstructionCodeGeneratorX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Loads the field at `offset` of the object in input 0 into the output,
  // widening to 32 bits with the extension matching the field's signedness.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsX86().AsCpuRegister();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      // Zero-extend: booleans are unsigned 8-bit.
      Register out = locations->Out().AsX86().AsCpuRegister();
      __ movzxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimByte: {
      // Sign-extend: bytes are signed 8-bit.
      Register out = locations->Out().AsX86().AsCpuRegister();
      __ movsxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimShort: {
      // Sign-extend: shorts are signed 16-bit.
      Register out = locations->Out().AsX86().AsCpuRegister();
      __ movsxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimChar: {
      // Zero-extend: chars are unsigned 16-bit.
      Register out = locations->Out().AsX86().AsCpuRegister();
      __ movzxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // Plain 32-bit load; references are 32-bit here too.
      Register out = locations->Out().AsX86().AsCpuRegister();
      __ movl(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimLong: {
      // TODO: support volatile.
      // 64-bit load as two 32-bit moves (low word, then high word); not
      // atomic.
      X86ManagedRegister out = locations->Out().AsX86();
      __ movl(out.AsRegisterPairLow(), Address(obj, offset));
      __ movl(out.AsRegisterPairHigh(), Address(obj, kX86WordSize + offset));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      // No break needed: LOG(FATAL) aborts, so no fall-through at runtime.
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
  }
}
1119
1120void LocationsBuilderX86::VisitNullCheck(HNullCheck* instruction) {
1121  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1122  locations->SetInAt(0, Location::Any());
1123  // TODO: Have a normalization phase that makes this instruction never used.
1124  locations->SetOut(Location::SameAsFirstInput());
1125  instruction->SetLocations(locations);
1126}
1127
1128void InstructionCodeGeneratorX86::VisitNullCheck(HNullCheck* instruction) {
1129  SlowPathCode* slow_path =
1130      new (GetGraph()->GetArena()) NullCheckSlowPathX86(instruction->GetDexPc());
1131  codegen_->AddSlowPath(slow_path);
1132
1133  LocationSummary* locations = instruction->GetLocations();
1134  Location obj = locations->InAt(0);
1135  DCHECK(obj.Equals(locations->Out()));
1136
1137  if (obj.IsRegister()) {
1138    __ cmpl(obj.AsX86().AsCpuRegister(), Immediate(0));
1139  } else {
1140    DCHECK(locations->InAt(0).IsStackSlot());
1141    __ cmpl(Address(ESP, obj.GetStackIndex()), Immediate(0));
1142  }
1143  __ j(kEqual, slow_path->GetEntryLabel());
1144}
1145
void LocationsBuilderX86::VisitTemporary(HTemporary* temp) {
  // Temporaries carry no location constraints of their own.
  temp->SetLocations(nullptr);
}
1149
void InstructionCodeGeneratorX86::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator. No locations were
  // created for it (see LocationsBuilderX86::VisitTemporary).
}
1153
void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction) {
  // Parallel moves are expected to be inserted after location building, so
  // the builder should never visit one.
  LOG(FATAL) << "Unreachable";
}
1157
void InstructionCodeGeneratorX86::VisitParallelMove(HParallelMove* instruction) {
  // Delegate to the resolver, which sequences the moves (see EmitMove and
  // EmitSwap below) and emits the native code for them.
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
1161
X86Assembler* ParallelMoveResolverX86::GetAssembler() const {
  // The resolver emits through the code generator's assembler.
  return codegen_->GetAssembler();
}
1165
void ParallelMoveResolverX86::MoveMemoryToMemory(int dst, int src) {
  // Copies one stack word from ESP+`src` to ESP+`dst` through a scratch
  // register (x86 has no memory-to-memory mov).
  ScratchRegisterScope ensure_scratch(
      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
  // If the scratch register had to be spilled, a push moved ESP down one
  // word, so the incoming ESP-relative offsets must be adjusted.
  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, src + stack_offset));
  __ movl(Address(ESP, dst + stack_offset), static_cast<Register>(ensure_scratch.GetRegister()));
}
1173
1174void ParallelMoveResolverX86::EmitMove(size_t index) {
1175  MoveOperands* move = moves_.Get(index);
1176  Location source = move->GetSource();
1177  Location destination = move->GetDestination();
1178
1179  if (source.IsRegister()) {
1180    if (destination.IsRegister()) {
1181      __ movl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
1182    } else {
1183      DCHECK(destination.IsStackSlot());
1184      __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsCpuRegister());
1185    }
1186  } else if (source.IsStackSlot()) {
1187    if (destination.IsRegister()) {
1188      __ movl(destination.AsX86().AsCpuRegister(), Address(ESP, source.GetStackIndex()));
1189    } else {
1190      DCHECK(destination.IsStackSlot());
1191      MoveMemoryToMemory(destination.GetStackIndex(),
1192                         source.GetStackIndex());
1193    }
1194  } else if (source.IsConstant()) {
1195    HIntConstant* instruction = source.GetConstant()->AsIntConstant();
1196    Immediate imm(instruction->AsIntConstant()->GetValue());
1197    if (destination.IsRegister()) {
1198      __ movl(destination.AsX86().AsCpuRegister(), imm);
1199    } else {
1200      __ movl(Address(ESP, destination.GetStackIndex()), imm);
1201    }
1202  } else {
1203    LOG(FATAL) << "Unimplemented";
1204  }
1205}
1206
void ParallelMoveResolverX86::Exchange(Register reg, int mem) {
  // Swaps `reg` with the stack slot at ESP+`mem` through a scratch
  // register. Suggest a scratch different from `reg` itself.
  Register suggested_scratch = reg == EAX ? EBX : EAX;
  ScratchRegisterScope ensure_scratch(
      this, reg, suggested_scratch, codegen_->GetNumberOfCoreRegisters());

  // A spilled scratch register was pushed, shifting ESP down one word, so
  // the ESP-relative offset must be adjusted.
  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset));
  __ movl(Address(ESP, mem + stack_offset), reg);
  __ movl(reg, static_cast<Register>(ensure_scratch.GetRegister()));
}
1217
void ParallelMoveResolverX86::Exchange(int mem1, int mem2) {
  // Swaps the stack slots at ESP+`mem1` and ESP+`mem2` using two scratch
  // registers.
  ScratchRegisterScope ensure_scratch1(
      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());

  // Suggest a second scratch distinct from the first one chosen.
  Register suggested_scratch = ensure_scratch1.GetRegister() == EAX ? EBX : EAX;
  ScratchRegisterScope ensure_scratch2(
      this, ensure_scratch1.GetRegister(), suggested_scratch, codegen_->GetNumberOfCoreRegisters());

  // Each spilled scratch register was pushed, moving ESP down one word, so
  // accumulate the adjustment for both possible spills.
  int stack_offset = ensure_scratch1.IsSpilled() ? kX86WordSize : 0;
  stack_offset += ensure_scratch2.IsSpilled() ? kX86WordSize : 0;
  __ movl(static_cast<Register>(ensure_scratch1.GetRegister()), Address(ESP, mem1 + stack_offset));
  __ movl(static_cast<Register>(ensure_scratch2.GetRegister()), Address(ESP, mem2 + stack_offset));
  __ movl(Address(ESP, mem2 + stack_offset), static_cast<Register>(ensure_scratch1.GetRegister()));
  __ movl(Address(ESP, mem1 + stack_offset), static_cast<Register>(ensure_scratch2.GetRegister()));
}
1233
1234void ParallelMoveResolverX86::EmitSwap(size_t index) {
1235  MoveOperands* move = moves_.Get(index);
1236  Location source = move->GetSource();
1237  Location destination = move->GetDestination();
1238
1239  if (source.IsRegister() && destination.IsRegister()) {
1240    __ xchgl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
1241  } else if (source.IsRegister() && destination.IsStackSlot()) {
1242    Exchange(source.AsX86().AsCpuRegister(), destination.GetStackIndex());
1243  } else if (source.IsStackSlot() && destination.IsRegister()) {
1244    Exchange(destination.AsX86().AsCpuRegister(), source.GetStackIndex());
1245  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
1246    Exchange(destination.GetStackIndex(), source.GetStackIndex());
1247  } else {
1248    LOG(FATAL) << "Unimplemented";
1249  }
1250}
1251
void ParallelMoveResolverX86::SpillScratch(int reg) {
  // Save a scratch register on the stack; RestoreScratch pops it back.
  __ pushl(static_cast<Register>(reg));
}
1255
void ParallelMoveResolverX86::RestoreScratch(int reg) {
  // Reload a scratch register previously saved by SpillScratch.
  __ popl(static_cast<Register>(reg));
}
1259
1260}  // namespace x86
1261}  // namespace art
1262