// code_generator_x86.cc -- revision 1a43dd78d054dbad8d7af9ba4829ea2f1cb70b53
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86.h"
18#include "gc/accounting/card_table.h"
19#include "utils/assembler.h"
20#include "utils/x86/assembler_x86.h"
21#include "utils/x86/managed_register_x86.h"
22
23#include "entrypoints/quick/quick_entrypoints.h"
24#include "mirror/array.h"
25#include "mirror/art_method.h"
26#include "thread.h"
27
28namespace art {
29
// Returns the x86-specific view of this Location's managed register.
x86::X86ManagedRegister Location::AsX86() const {
  return reg().AsX86();
}
33
34namespace x86 {
35
36#define __ reinterpret_cast<X86Assembler*>(codegen->GetAssembler())->
37
// Slow path for an explicit null check: calls the pThrowNullPointer quick
// runtime entrypoint, which does not return.
class NullCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86(uint32_t dex_pc) : dex_pc_(dex_pc) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    // Call through the thread-local (fs-segment) entrypoint table.
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowNullPointer)));
    // Record pc information so the runtime can map the call back to the dex pc.
    codegen->RecordPcInfo(dex_pc_);
  }

 private:
  const uint32_t dex_pc_;  // Dex pc of the instruction that required the check.
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86);
};
52
53#undef __
54#define __ reinterpret_cast<X86Assembler*>(GetAssembler())->
55
56inline Condition X86Condition(IfCondition cond) {
57  switch (cond) {
58    case kCondEQ: return kEqual;
59    case kCondNE: return kNotEqual;
60    case kCondLT: return kLess;
61    case kCondLE: return kLessEqual;
62    case kCondGT: return kGreater;
63    case kCondGE: return kGreaterEqual;
64    default:
65      LOG(FATAL) << "Unknown if condition";
66  }
67  return kEqual;
68}
69
// One word is pushed at frame entry (the return PC, modeled as a fake
// register in GenerateFrameEntry to mimic Quick's frame layout).
static constexpr int kNumberOfPushedRegistersAtEntry = 1;
// Stack offset of the spilled current-method pointer (EAX on entry); read
// back by InstructionCodeGeneratorX86::LoadCurrentMethod.
static constexpr int kCurrentMethodStackOffset = 0;
72
73void CodeGeneratorX86::DumpCoreRegister(std::ostream& stream, int reg) const {
74  stream << X86ManagedRegister::FromCpuRegister(Register(reg));
75}
76
77void CodeGeneratorX86::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
78  stream << X86ManagedRegister::FromXmmRegister(XmmRegister(reg));
79}
80
// Constructs the x86 code generator and its helper visitors (locations
// builder, instruction code generator, parallel-move resolver).
CodeGeneratorX86::CodeGeneratorX86(HGraph* graph)
    : CodeGenerator(graph, kNumberOfRegIds),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this) {}
86
// Bytes occupied by registers spilled at frame entry (the pushed return PC).
size_t CodeGeneratorX86::FrameEntrySpillSize() const {
  return kNumberOfPushedRegistersAtEntry * kX86WordSize;
}
90
// The blocked-register array stores single registers first, then register
// pairs; this returns a pointer to the pair section.
static bool* GetBlockedRegisterPairs(bool* blocked_registers) {
  return blocked_registers + kNumberOfAllocIds;
}
94
// Allocates a free register (a pair for longs, a single core register for
// other integral/reference types) and marks it -- plus every pair that
// overlaps it -- as blocked. Float/double allocation is not implemented yet.
ManagedRegister CodeGeneratorX86::AllocateFreeRegister(Primitive::Type type,
                                                       bool* blocked_registers) const {
  switch (type) {
    case Primitive::kPrimLong: {
      bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);
      size_t reg = AllocateFreeRegisterInternal(blocked_register_pairs, kNumberOfRegisterPairs);
      X86ManagedRegister pair =
          X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      // Block both halves so single-register allocation cannot reuse them.
      blocked_registers[pair.AsRegisterPairLow()] = true;
      blocked_registers[pair.AsRegisterPairHigh()] = true;
      // Block all other register pairs that share a register with `pair`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        X86ManagedRegister current =
            X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == pair.AsRegisterPairLow()
            || current.AsRegisterPairLow() == pair.AsRegisterPairHigh()
            || current.AsRegisterPairHigh() == pair.AsRegisterPairLow()
            || current.AsRegisterPairHigh() == pair.AsRegisterPairHigh()) {
          blocked_register_pairs[i] = true;
        }
      }
      return pair;
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register reg = static_cast<Register>(
          AllocateFreeRegisterInternal(blocked_registers, kNumberOfCpuRegisters));
      // Block all register pairs that contain `reg`.
      bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        X86ManagedRegister current =
            X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs[i] = true;
        }
      }
      return X86ManagedRegister::FromCpuRegister(reg);
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << type;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return ManagedRegister::NoRegister();
}
149
// Marks registers the allocator must never hand out: ESP, the callee-saved
// registers Quick expects untouched, and the pairs containing them.
void CodeGeneratorX86::SetupBlockedRegisters(bool* blocked_registers) const {
  bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);

  // Don't allocate the dalvik style register pair passing.
  blocked_register_pairs[ECX_EDX] = true;

  // Stack register is always reserved.
  blocked_registers[ESP] = true;

  // TODO: We currently don't use Quick's callee saved registers.
  blocked_registers[EBP] = true;
  blocked_registers[ESI] = true;
  blocked_registers[EDI] = true;
  // Block every pair whose high half is the blocked EDI.
  blocked_register_pairs[EAX_EDI] = true;
  blocked_register_pairs[EDX_EDI] = true;
  blocked_register_pairs[ECX_EDI] = true;
  blocked_register_pairs[EBX_EDI] = true;
}
168
// Total number of register ids the allocator tracks for x86.
size_t CodeGeneratorX86::GetNumberOfRegisters() const {
  return kNumberOfRegIds;
}

// Convenience wrapper building a Location from a core CPU register.
static Location X86CpuLocation(Register reg) {
  return Location::RegisterLocation(X86ManagedRegister::FromCpuRegister(reg));
}
176
// The instruction visitor shares the assembler owned by `codegen`.
InstructionCodeGeneratorX86::InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
181
// Emits the method prologue: reserves the frame and spills the current
// method (passed in EAX) to the slot LoadCurrentMethod reads from.
void CodeGeneratorX86::GenerateFrameEntry() {
  // Create a fake register to mimic Quick.
  static const int kFakeReturnRegister = 8;
  core_spill_mask_ |= (1 << kFakeReturnRegister);

  // The return PC has already been pushed on the stack.
  __ subl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
  __ movl(Address(ESP, kCurrentMethodStackOffset), EAX);
}

// Emits the frame teardown: releases what GenerateFrameEntry reserved.
void CodeGeneratorX86::GenerateFrameExit() {
  __ addl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
}
195
// Binds `label` to the current assembler position.
void CodeGeneratorX86::Bind(Label* label) {
  __ Bind(label);
}

// Loads the current method pointer (spilled in GenerateFrameEntry) into `reg`.
void InstructionCodeGeneratorX86::LoadCurrentMethod(Register reg) {
  __ movl(reg, Address(ESP, kCurrentMethodStackOffset));
}
203
204Location CodeGeneratorX86::GetStackLocation(HLoadLocal* load) const {
205  switch (load->GetType()) {
206    case Primitive::kPrimLong:
207      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
208      break;
209
210    case Primitive::kPrimInt:
211    case Primitive::kPrimNot:
212      return Location::StackSlot(GetStackSlot(load->GetLocal()));
213
214    case Primitive::kPrimFloat:
215    case Primitive::kPrimDouble:
216      LOG(FATAL) << "Unimplemented type " << load->GetType();
217
218    case Primitive::kPrimBoolean:
219    case Primitive::kPrimByte:
220    case Primitive::kPrimChar:
221    case Primitive::kPrimShort:
222    case Primitive::kPrimVoid:
223      LOG(FATAL) << "Unexpected type " << load->GetType();
224  }
225
226  LOG(FATAL) << "Unreachable";
227  return Location();
228}
229
// Core registers used to pass arguments to quick runtime entrypoints.
static constexpr Register kRuntimeParameterCoreRegisters[] = { EAX, ECX, EDX };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);

// Calling convention used when calling into the quick runtime.
class InvokeRuntimeCallingConvention : public CallingConvention<Register> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
243
// Computes the location of the next outgoing argument of type `type` under
// the dex calling convention, advancing the running gp register index.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        // Argument fits in a core register.
        return X86CpuLocation(calling_convention.GetRegisterAt(index));
      } else {
        // Registers exhausted: argument goes on the stack.
        return Location::StackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimLong: {
      // A long consumes two consecutive gp indexes.
      uint32_t index = gp_index_;
      gp_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        // Both halves in registers: use a register pair.
        return Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(
            calling_convention.GetRegisterPairAt(index)));
      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
        // Split case: low half in the last register, high half on the stack
        // (see the QuickParameter handling in CodeGeneratorX86::Move64).
        return Location::QuickParameter(index);
      } else {
        // Both halves on the stack.
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented parameter type " << type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
284
// Emits code moving a 32-bit value between `source` and `destination`
// (register or stack slot). No-op when they are the same location.
void CodeGeneratorX86::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
    } else {
      DCHECK(source.IsStackSlot());
      __ movl(destination.AsX86().AsCpuRegister(), Address(ESP, source.GetStackIndex()));
    }
  } else {
    if (source.IsRegister()) {
      __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsCpuRegister());
    } else {
      DCHECK(source.IsStackSlot());
      // Stack-to-stack: movl cannot take two memory operands, so bounce the
      // value through a push/pop.
      __ pushl(Address(ESP, source.GetStackIndex()));
      __ popl(Address(ESP, destination.GetStackIndex()));
    }
  }
}
306
// Emits code moving a 64-bit value between `source` and `destination`:
// register pairs, double stack slots, or a "quick parameter" (low half in a
// register of the dex calling convention, high half on the stack).
void CodeGeneratorX86::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      // Pair-to-pair: move both halves.
      __ movl(destination.AsX86().AsRegisterPairLow(), source.AsX86().AsRegisterPairLow());
      __ movl(destination.AsX86().AsRegisterPairHigh(), source.AsX86().AsRegisterPairHigh());
    } else if (source.IsQuickParameter()) {
      // Incoming split long: low half in a convention register, high half in
      // the caller's frame (hence the GetFrameSize() adjustment).
      uint32_t argument_index = source.GetQuickParameterIndex();
      InvokeDexCallingConvention calling_convention;
      __ movl(destination.AsX86().AsRegisterPairLow(),
              calling_convention.GetRegisterAt(argument_index));
      __ movl(destination.AsX86().AsRegisterPairHigh(), Address(ESP,
          calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movl(destination.AsX86().AsRegisterPairLow(), Address(ESP, source.GetStackIndex()));
      __ movl(destination.AsX86().AsRegisterPairHigh(),
              Address(ESP, source.GetHighStackIndex(kX86WordSize)));
    }
  } else if (destination.IsQuickParameter()) {
    // Outgoing split long: low half into a convention register, high half
    // into the outgoing argument area of the current frame.
    InvokeDexCallingConvention calling_convention;
    uint32_t argument_index = destination.GetQuickParameterIndex();
    if (source.IsRegister()) {
      __ movl(calling_convention.GetRegisterAt(argument_index), source.AsX86().AsRegisterPairLow());
      __ movl(Address(ESP, calling_convention.GetStackOffsetOf(argument_index + 1)),
              source.AsX86().AsRegisterPairHigh());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movl(calling_convention.GetRegisterAt(argument_index),
              Address(ESP, source.GetStackIndex()));
      // Memory-to-memory move of the high half via push/pop.
      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
      __ popl(Address(ESP, calling_convention.GetStackOffsetOf(argument_index + 1)));
    }
  } else {
    if (source.IsRegister()) {
      __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsRegisterPairLow());
      __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
              source.AsX86().AsRegisterPairHigh());
    } else if (source.IsQuickParameter()) {
      // Incoming split long spilled to a double stack slot.
      InvokeDexCallingConvention calling_convention;
      uint32_t argument_index = source.GetQuickParameterIndex();
      __ movl(Address(ESP, destination.GetStackIndex()),
              calling_convention.GetRegisterAt(argument_index));
      __ pushl(Address(ESP,
          calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize()));
      __ popl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack: move both halves via push/pop.
      __ pushl(Address(ESP, source.GetStackIndex()));
      __ popl(Address(ESP, destination.GetStackIndex()));
      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
      __ popl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)));
    }
  }
}
364
// Moves the value produced by `instruction` into `location`. Constants are
// materialized directly as immediates; loads of locals come from their stack
// slot; anything else is taken from the instruction's output location.
void CodeGeneratorX86::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  if (instruction->AsIntConstant() != nullptr) {
    Immediate imm(instruction->AsIntConstant()->GetValue());
    if (location.IsRegister()) {
      __ movl(location.AsX86().AsCpuRegister(), imm);
    } else {
      __ movl(Address(ESP, location.GetStackIndex()), imm);
    }
  } else if (instruction->AsLongConstant() != nullptr) {
    // 64-bit constant: emit the two 32-bit halves separately.
    int64_t value = instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      __ movl(location.AsX86().AsRegisterPairLow(), Immediate(Low32Bits(value)));
      __ movl(location.AsX86().AsRegisterPairHigh(), Immediate(High32Bits(value)));
    } else {
      __ movl(Address(ESP, location.GetStackIndex()), Immediate(Low32Bits(value)));
      __ movl(Address(ESP, location.GetHighStackIndex(kX86WordSize)), Immediate(High32Bits(value)));
    }
  } else if (instruction->AsLoadLocal() != nullptr) {
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        Move32(location, Location::StackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      case Primitive::kPrimLong:
        Move64(location, Location::DoubleStackSlot(
            GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      default:
        LOG(FATAL) << "Unimplemented local type " << instruction->GetType();
    }
  } else {
    // General case: `instruction` must immediately precede the move (or be
    // followed only by temporaries); copy from its output location.
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        Move32(location, instruction->GetLocations()->Out());
        break;

      case Primitive::kPrimLong:
        Move64(location, instruction->GetLocations()->Out());
        break;

      default:
        LOG(FATAL) << "Unimplemented type " << instruction->GetType();
    }
  }
}
422
// Goto needs no locations.
void LocationsBuilderX86::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

// Emits an unconditional branch, folding a jump to the exit block into the
// frame exit and eliding jumps to the fall-through block.
void InstructionCodeGeneratorX86::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  if (GetGraph()->GetExitBlock() == successor) {
    codegen_->GenerateFrameExit();
  } else if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}

// Exit needs no locations.
void LocationsBuilderX86::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

// The exit block is never reached directly; trap in debug builds.
void InstructionCodeGeneratorX86::VisitExit(HExit* exit) {
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ int3();
  }
}
446
// An HIf only needs an input location when its condition was materialized
// into a value; otherwise VisitIf reads the condition's own locations.
void LocationsBuilderX86::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  HCondition* condition = cond->AsCondition();
  if (condition->NeedsMaterialization()) {
    locations->SetInAt(0, Location::Any());
  }
  if_instr->SetLocations(locations);
}
457
458void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
459  HInstruction* cond = if_instr->InputAt(0);
460  DCHECK(cond->IsCondition());
461  HCondition* condition = cond->AsCondition();
462  if (condition->NeedsMaterialization()) {
463    // Materialized condition, compare against 0
464    Location lhs = if_instr->GetLocations()->InAt(0);
465    if (lhs.IsRegister()) {
466      __ cmpl(lhs.AsX86().AsCpuRegister(), Immediate(0));
467    } else {
468      __ cmpl(Address(ESP, lhs.GetStackIndex()), Immediate(0));
469    }
470    __ j(kEqual,  codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
471  } else {
472    Location lhs = condition->GetLocations()->InAt(0);
473    Location rhs = condition->GetLocations()->InAt(1);
474    // LHS is guaranteed to be in a register (see LocationsBuilderX86::VisitCondition).
475    if (rhs.IsRegister()) {
476      __ cmpl(lhs.AsX86().AsCpuRegister(), rhs.AsX86().AsCpuRegister());
477    } else if (rhs.IsConstant()) {
478      HIntConstant* instruction = rhs.GetConstant()->AsIntConstant();
479      Immediate imm(instruction->AsIntConstant()->GetValue());
480      __ cmpl(lhs.AsX86().AsCpuRegister(), imm);
481    } else {
482      __ cmpl(lhs.AsX86().AsCpuRegister(), Address(ESP, rhs.GetStackIndex()));
483    }
484    __ j(X86Condition(condition->GetCondition()),
485         codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
486  }
487  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
488    __ jmp(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
489  }
490}
491
// Locals need no locations of their own.
void LocationsBuilderX86::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitLocal(HLocal* local) {
  // Locals are only declared in the entry block.
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderX86::VisitLoadLocal(HLoadLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
}

// A store to a local constrains its value input to the local's stack slot,
// so the store itself emits no code.
void LocationsBuilderX86::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << store->InputAt(1)->GetType();
  }
  store->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitStoreLocal(HStoreLocal* store) {
  // Nothing to do: the input was constrained to the local's stack slot above.
}
532
// Comparisons take their first input in a register and the second anywhere;
// an output register is only needed when the result is materialized.
void LocationsBuilderX86::VisitCondition(HCondition* comp) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(comp);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::Any());
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister());
  }
  comp->SetLocations(locations);
}
542
// Materializes a comparison into 0/1 with setcc; non-materialized
// comparisons emit nothing here and are consumed directly by VisitIf.
void InstructionCodeGeneratorX86::VisitCondition(HCondition* comp) {
  if (comp->NeedsMaterialization()) {
    LocationSummary* locations = comp->GetLocations();
    if (locations->InAt(1).IsRegister()) {
      __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(),
              locations->InAt(1).AsX86().AsCpuRegister());
    } else if (locations->InAt(1).IsConstant()) {
      HConstant* instruction = locations->InAt(1).GetConstant();
      Immediate imm(instruction->AsIntConstant()->GetValue());
      __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(), imm);
    } else {
      __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(),
              Address(ESP, locations->InAt(1).GetStackIndex()));
    }
    // Set the output's low byte to 1 if the condition holds, else 0.
    __ setb(X86Condition(comp->GetCondition()), locations->Out().AsX86().AsCpuRegister());
  }
}
560
// All concrete comparison nodes delegate to the shared HCondition handling
// above (LocationsBuilderX86::VisitCondition /
// InstructionCodeGeneratorX86::VisitCondition).
void LocationsBuilderX86::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
608
// Constants live in a constant location; no code is emitted for them here.
void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
  constant->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site (see CodeGeneratorX86::Move).
}

void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
  constant->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
}
627
// Void return: no locations needed.
void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

// Tear down the frame and return.
void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret) {
  codegen_->GenerateFrameExit();
  __ ret();
}
636
// Pins the return value to the x86 return registers: EAX for 32-bit values
// and references, EAX:EDX for longs.
void LocationsBuilderX86::VisitReturn(HReturn* ret) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(0, X86CpuLocation(EAX));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(
          0, Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(EAX_EDX)));
      break;

    default:
      LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
  }
  ret->SetLocations(locations);
}
659
// Emits the return: the value is already in EAX (or EAX:EDX) per the
// locations above, so only the frame teardown and ret are emitted.
void InstructionCodeGeneratorX86::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    // Sanity-check that the register allocator honored the pinned locations.
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86().AsCpuRegister(), EAX);
        break;

      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86().AsRegisterPair(), EAX_EDX);
        break;

      default:
        LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
  __ ret();
}
683
// Sets up locations for a static call: arguments follow the dex calling
// convention, EAX is a temp for the method pointer, and the result lands in
// the standard return registers.
void LocationsBuilderX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(invoke);
  locations->AddTemp(X86CpuLocation(EAX));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  switch (invoke->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetOut(X86CpuLocation(EAX));
      break;

    case Primitive::kPrimLong:
      locations->SetOut(Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(EAX_EDX)));
      break;

    case Primitive::kPrimVoid:
      break;

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented return type " << invoke->GetType();
      break;
  }

  invoke->SetLocations(locations);
}
719
// Emits a static call by resolving the callee through the current method's
// dex cache and calling its quick-compiled entry point.
void InstructionCodeGeneratorX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).AsX86().AsCpuRegister();
  // Byte offset of the callee's slot in the dex cache methods array.
  uint32_t heap_reference_size = sizeof(mirror::HeapReference<mirror::Object>);
  size_t index_in_cache = mirror::Array::DataOffset(heap_reference_size).Int32Value() +
      invoke->GetIndexInDexCache() * kX86WordSize;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ movl(temp, Address(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()));
  // temp = temp[index_in_cache]
  __ movl(temp, Address(temp, index_in_cache));
  // (temp + offset_of_quick_compiled_code)()
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value()));

  // Record pc information so the runtime can map this call to its dex pc.
  codegen_->RecordPcInfo(invoke->GetDexPc());
}
744
745void LocationsBuilderX86::VisitAdd(HAdd* add) {
746  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(add);
747  switch (add->GetResultType()) {
748    case Primitive::kPrimInt:
749    case Primitive::kPrimLong: {
750      locations->SetInAt(0, Location::RequiresRegister());
751      locations->SetInAt(1, Location::Any());
752      locations->SetOut(Location::SameAsFirstInput());
753      break;
754    }
755
756    case Primitive::kPrimBoolean:
757    case Primitive::kPrimByte:
758    case Primitive::kPrimChar:
759    case Primitive::kPrimShort:
760      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
761      break;
762
763    default:
764      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
765  }
766  add->SetLocations(locations);
767}
768
// Emits the addition in place on the first input (which is also the output).
// Longs use an addl/adcl pair to propagate the carry into the high half.
void InstructionCodeGeneratorX86::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(),
                locations->Out().AsX86().AsCpuRegister());
      if (locations->InAt(1).IsRegister()) {
        __ addl(locations->InAt(0).AsX86().AsCpuRegister(),
                locations->InAt(1).AsX86().AsCpuRegister());
      } else if (locations->InAt(1).IsConstant()) {
        HConstant* instruction = locations->InAt(1).GetConstant();
        Immediate imm(instruction->AsIntConstant()->GetValue());
        __ addl(locations->InAt(0).AsX86().AsCpuRegister(), imm);
      } else {
        __ addl(locations->InAt(0).AsX86().AsCpuRegister(),
                Address(ESP, locations->InAt(1).GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      DCHECK_EQ(locations->InAt(0).AsX86().AsRegisterPair(),
                locations->Out().AsX86().AsRegisterPair());
      if (locations->InAt(1).IsRegister()) {
        // Low halves first, then high halves with carry.
        __ addl(locations->InAt(0).AsX86().AsRegisterPairLow(),
                locations->InAt(1).AsX86().AsRegisterPairLow());
        __ adcl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
                locations->InAt(1).AsX86().AsRegisterPairHigh());
      } else {
        __ addl(locations->InAt(0).AsX86().AsRegisterPairLow(),
                Address(ESP, locations->InAt(1).GetStackIndex()));
        __ adcl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
                Address(ESP, locations->InAt(1).GetHighStackIndex(kX86WordSize)));
      }
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
}
817
818void LocationsBuilderX86::VisitSub(HSub* sub) {
819  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(sub);
820  switch (sub->GetResultType()) {
821    case Primitive::kPrimInt:
822    case Primitive::kPrimLong: {
823      locations->SetInAt(0, Location::RequiresRegister());
824      locations->SetInAt(1, Location::Any());
825      locations->SetOut(Location::SameAsFirstInput());
826      break;
827    }
828
829    case Primitive::kPrimBoolean:
830    case Primitive::kPrimByte:
831    case Primitive::kPrimChar:
832    case Primitive::kPrimShort:
833      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
834      break;
835
836    default:
837      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
838  }
839  sub->SetLocations(locations);
840}
841
842void InstructionCodeGeneratorX86::VisitSub(HSub* sub) {
843  LocationSummary* locations = sub->GetLocations();
844  switch (sub->GetResultType()) {
845    case Primitive::kPrimInt: {
846      DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(),
847                locations->Out().AsX86().AsCpuRegister());
848      if (locations->InAt(1).IsRegister()) {
849        __ subl(locations->InAt(0).AsX86().AsCpuRegister(),
850                locations->InAt(1).AsX86().AsCpuRegister());
851      } else if (locations->InAt(1).IsConstant()) {
852        HConstant* instruction = locations->InAt(1).GetConstant();
853        Immediate imm(instruction->AsIntConstant()->GetValue());
854        __ subl(locations->InAt(0).AsX86().AsCpuRegister(), imm);
855      } else {
856        __ subl(locations->InAt(0).AsX86().AsCpuRegister(),
857                Address(ESP, locations->InAt(1).GetStackIndex()));
858      }
859      break;
860    }
861
862    case Primitive::kPrimLong: {
863      DCHECK_EQ(locations->InAt(0).AsX86().AsRegisterPair(),
864                locations->Out().AsX86().AsRegisterPair());
865      if (locations->InAt(1).IsRegister()) {
866        __ subl(locations->InAt(0).AsX86().AsRegisterPairLow(),
867                locations->InAt(1).AsX86().AsRegisterPairLow());
868        __ sbbl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
869                locations->InAt(1).AsX86().AsRegisterPairHigh());
870      } else {
871        __ subl(locations->InAt(0).AsX86().AsRegisterPairLow(),
872                Address(ESP, locations->InAt(1).GetStackIndex()));
873        __ sbbl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
874                Address(ESP, locations->InAt(1).GetHighStackIndex(kX86WordSize)));
875      }
876      break;
877    }
878
879    case Primitive::kPrimBoolean:
880    case Primitive::kPrimByte:
881    case Primitive::kPrimChar:
882    case Primitive::kPrimShort:
883      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
884      break;
885
886    default:
887      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
888  }
889}
890
891void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
892  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
893  locations->SetOut(X86CpuLocation(EAX));
894  InvokeRuntimeCallingConvention calling_convention;
895  locations->AddTemp(X86CpuLocation(calling_convention.GetRegisterAt(0)));
896  locations->AddTemp(X86CpuLocation(calling_convention.GetRegisterAt(1)));
897  instruction->SetLocations(locations);
898}
899
void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  // Set up the entrypoint arguments: arg1 = referring method, arg0 = type index.
  LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));

  // Call the allocation entrypoint through the thread-local (fs-relative)
  // entrypoint table.
  __ fs()->call(
      Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocObjectWithAccessCheck)));

  // Record the call site so the runtime can map the return pc to this dex pc.
  codegen_->RecordPcInfo(instruction->GetDexPc());
}
910
911void LocationsBuilderX86::VisitParameterValue(HParameterValue* instruction) {
912  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
913  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
914  if (location.IsStackSlot()) {
915    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
916  } else if (location.IsDoubleStackSlot()) {
917    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
918  }
919  locations->SetOut(location);
920  instruction->SetLocations(locations);
921}
922
void InstructionCodeGeneratorX86::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to emit: the parameter is already at the location computed by the
  // locations builder (register or caller-frame stack slot).
}
925
926void LocationsBuilderX86::VisitNot(HNot* instruction) {
927  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
928  locations->SetInAt(0, Location::RequiresRegister());
929  locations->SetOut(Location::SameAsFirstInput());
930  instruction->SetLocations(locations);
931}
932
933void InstructionCodeGeneratorX86::VisitNot(HNot* instruction) {
934  LocationSummary* locations = instruction->GetLocations();
935  Location out = locations->Out();
936  DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(), out.AsX86().AsCpuRegister());
937  __ xorl(out.AsX86().AsCpuRegister(), Immediate(1));
938}
939
940void LocationsBuilderX86::VisitCompare(HCompare* compare) {
941  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare);
942  locations->SetInAt(0, Location::RequiresRegister());
943  locations->SetInAt(1, Location::Any());
944  locations->SetOut(Location::RequiresRegister());
945  compare->SetLocations(locations);
946}
947
948void InstructionCodeGeneratorX86::VisitCompare(HCompare* compare) {
949  Label greater, done;
950  LocationSummary* locations = compare->GetLocations();
951  switch (compare->InputAt(0)->GetType()) {
952    case Primitive::kPrimLong: {
953      Label less, greater, done;
954      Register output = locations->Out().AsX86().AsCpuRegister();
955      X86ManagedRegister left = locations->InAt(0).AsX86();
956      Location right = locations->InAt(1);
957      if (right.IsRegister()) {
958        __ cmpl(left.AsRegisterPairHigh(), right.AsX86().AsRegisterPairHigh());
959      } else {
960        DCHECK(right.IsDoubleStackSlot());
961        __ cmpl(left.AsRegisterPairHigh(), Address(ESP, right.GetHighStackIndex(kX86WordSize)));
962      }
963      __ j(kLess, &less);  // Signed compare.
964      __ j(kGreater, &greater);  // Signed compare.
965      if (right.IsRegister()) {
966        __ cmpl(left.AsRegisterPairLow(), right.AsX86().AsRegisterPairLow());
967      } else {
968        DCHECK(right.IsDoubleStackSlot());
969        __ cmpl(left.AsRegisterPairLow(), Address(ESP, right.GetStackIndex()));
970      }
971      __ movl(output, Immediate(0));
972      __ j(kEqual, &done);
973      __ j(kBelow, &less);  // Unsigned compare.
974
975      __ Bind(&greater);
976      __ movl(output, Immediate(1));
977      __ jmp(&done);
978
979      __ Bind(&less);
980      __ movl(output, Immediate(-1));
981
982      __ Bind(&done);
983      break;
984    }
985    default:
986      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
987  }
988}
989
990void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
991  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
992  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
993    locations->SetInAt(i, Location::Any());
994  }
995  locations->SetOut(Location::Any());
996  instruction->SetLocations(locations);
997}
998
void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction) {
  // Phis generate no code of their own; by code-generation time they are
  // presumably resolved by earlier passes into moves — reaching this visitor
  // indicates a compiler bug.
  LOG(FATAL) << "Unreachable";
}
1002
1003void LocationsBuilderX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
1004  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1005  locations->SetInAt(0, Location::RequiresRegister());
1006  Primitive::Type field_type = instruction->InputAt(1)->GetType();
1007  if (field_type == Primitive::kPrimBoolean || field_type == Primitive::kPrimByte) {
1008    // Ensure the value is in a byte register.
1009    locations->SetInAt(1, X86CpuLocation(EAX));
1010  } else {
1011    locations->SetInAt(1, Location::RequiresRegister());
1012  }
1013  // Temporary registers for the write barrier.
1014  if (instruction->InputAt(1)->GetType() == Primitive::kPrimNot) {
1015    locations->AddTemp(Location::RequiresRegister());
1016    // Ensure the card is in a byte register.
1017    locations->AddTemp(X86CpuLocation(ECX));
1018  }
1019  instruction->SetLocations(locations);
1020}
1021
// Emits an instance field store, sized by the field's primitive type, plus a
// card-marking write barrier for reference stores.
void InstructionCodeGeneratorX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsX86().AsCpuRegister();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
  Primitive::Type field_type = instruction->InputAt(1)->GetType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      // Byte store: the locations builder guarantees a byte-addressable register.
      ByteRegister value = locations->InAt(1).AsX86().AsByteRegister();
      __ movb(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      // 16-bit store.
      Register value = locations->InAt(1).AsX86().AsCpuRegister();
      __ movw(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimInt: {
      Register value = locations->InAt(1).AsX86().AsCpuRegister();
      __ movl(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimNot: {
      Register value = locations->InAt(1).AsX86().AsCpuRegister();
      __ movl(Address(obj, offset), value);
      // Write barrier: mark the card covering `obj` so the GC rescans it.
      // Skipped entirely when the stored reference is null.
      Label is_null;
      Register temp = locations->GetTemp(0).AsX86().AsCpuRegister();
      Register card = locations->GetTemp(1).AsX86().AsCpuRegister();
      __ testl(value, value);
      __ j(kEqual, &is_null);
      // Load the card table base from the thread (fs-relative).
      __ fs()->movl(card, Address::Absolute(Thread::CardTableOffset<kX86WordSize>().Int32Value()));
      // Card index = obj >> kCardShift; the byte written is the low byte of
      // `card` (the biased table base — ART's card-marking convention).
      __ movl(temp, obj);
      __ shrl(temp, Immediate(gc::accounting::CardTable::kCardShift));
      __ movb(Address(temp, card, TIMES_1, 0),  locations->GetTemp(1).AsX86().AsByteRegister());
      __ Bind(&is_null);
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit store as two 32-bit moves (low word, then high word).
      // NOTE(review): not atomic — volatile long stores would need extra work.
      X86ManagedRegister value = locations->InAt(1).AsX86();
      __ movl(Address(obj, offset), value.AsRegisterPairLow());
      __ movl(Address(obj, kX86WordSize + offset), value.AsRegisterPairHigh());
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      // LOG(FATAL) aborts, so the missing break cannot fall through in practice.
      LOG(FATAL) << "Unimplemented register type " << field_type;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
  }
}
1080
1081void LocationsBuilderX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
1082  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1083  locations->SetInAt(0, Location::RequiresRegister());
1084  locations->SetOut(Location::RequiresRegister());
1085  instruction->SetLocations(locations);
1086}
1087
// Emits an instance field load, choosing the extension mode by field type:
// zero-extension for boolean/char, sign-extension for byte/short.
void InstructionCodeGeneratorX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsX86().AsCpuRegister();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      // Zero-extend the byte (booleans are 0 or 1).
      Register out = locations->Out().AsX86().AsCpuRegister();
      __ movzxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimByte: {
      // Sign-extend the byte (byte is signed in Java).
      Register out = locations->Out().AsX86().AsCpuRegister();
      __ movsxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimShort: {
      // Sign-extend 16 bits.
      Register out = locations->Out().AsX86().AsCpuRegister();
      __ movsxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimChar: {
      // Zero-extend 16 bits (char is unsigned in Java).
      Register out = locations->Out().AsX86().AsCpuRegister();
      __ movzxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // Plain 32-bit load; references need no barrier on the read side here.
      Register out = locations->Out().AsX86().AsCpuRegister();
      __ movl(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimLong: {
      // TODO: support volatile.
      // 64-bit load as two 32-bit moves (low word, then high word) — not atomic.
      X86ManagedRegister out = locations->Out().AsX86();
      __ movl(out.AsRegisterPairLow(), Address(obj, offset));
      __ movl(out.AsRegisterPairHigh(), Address(obj, kX86WordSize + offset));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      // LOG(FATAL) aborts, so the missing break cannot fall through in practice.
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
  }
}
1141
1142void LocationsBuilderX86::VisitNullCheck(HNullCheck* instruction) {
1143  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1144  locations->SetInAt(0, Location::Any());
1145  // TODO: Have a normalization phase that makes this instruction never used.
1146  locations->SetOut(Location::SameAsFirstInput());
1147  instruction->SetLocations(locations);
1148}
1149
1150void InstructionCodeGeneratorX86::VisitNullCheck(HNullCheck* instruction) {
1151  SlowPathCode* slow_path =
1152      new (GetGraph()->GetArena()) NullCheckSlowPathX86(instruction->GetDexPc());
1153  codegen_->AddSlowPath(slow_path);
1154
1155  LocationSummary* locations = instruction->GetLocations();
1156  Location obj = locations->InAt(0);
1157  DCHECK(obj.Equals(locations->Out()));
1158
1159  if (obj.IsRegister()) {
1160    __ cmpl(obj.AsX86().AsCpuRegister(), Immediate(0));
1161  } else {
1162    DCHECK(locations->InAt(0).IsStackSlot());
1163    __ cmpl(Address(ESP, obj.GetStackIndex()), Immediate(0));
1164  }
1165  __ j(kEqual, slow_path->GetEntryLabel());
1166}
1167
void LocationsBuilderX86::VisitTemporary(HTemporary* temp) {
  // Temporaries carry no register constraints of their own.
  temp->SetLocations(nullptr);
}
1171
void InstructionCodeGeneratorX86::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  // A temporary reserves space for other instructions but emits no code itself.
}
1175
void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction) {
  // Parallel moves never go through location building; they are emitted
  // directly by the move resolver at code-generation time.
  LOG(FATAL) << "Unreachable";
}
1179
void InstructionCodeGeneratorX86::VisitParallelMove(HParallelMove* instruction) {
  // Delegate to the parallel move resolver, which orders the moves and emits
  // native code (cycles are broken with swaps — see EmitSwap below).
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
1183
// Returns the x86 assembler shared with the owning code generator, so the
// resolver's emitted moves land in the same instruction stream.
X86Assembler* ParallelMoveResolverX86::GetAssembler() const {
  return codegen_->GetAssembler();
}
1187
1188void ParallelMoveResolverX86::MoveMemoryToMemory(int dst, int src) {
1189  ScratchRegisterScope ensure_scratch(
1190      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
1191  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
1192  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, src + stack_offset));
1193  __ movl(Address(ESP, dst + stack_offset), static_cast<Register>(ensure_scratch.GetRegister()));
1194}
1195
1196void ParallelMoveResolverX86::EmitMove(size_t index) {
1197  MoveOperands* move = moves_.Get(index);
1198  Location source = move->GetSource();
1199  Location destination = move->GetDestination();
1200
1201  if (source.IsRegister()) {
1202    if (destination.IsRegister()) {
1203      __ movl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
1204    } else {
1205      DCHECK(destination.IsStackSlot());
1206      __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsCpuRegister());
1207    }
1208  } else if (source.IsStackSlot()) {
1209    if (destination.IsRegister()) {
1210      __ movl(destination.AsX86().AsCpuRegister(), Address(ESP, source.GetStackIndex()));
1211    } else {
1212      DCHECK(destination.IsStackSlot());
1213      MoveMemoryToMemory(destination.GetStackIndex(),
1214                         source.GetStackIndex());
1215    }
1216  } else if (source.IsConstant()) {
1217    HIntConstant* instruction = source.GetConstant()->AsIntConstant();
1218    Immediate imm(instruction->AsIntConstant()->GetValue());
1219    if (destination.IsRegister()) {
1220      __ movl(destination.AsX86().AsCpuRegister(), imm);
1221    } else {
1222      __ movl(Address(ESP, destination.GetStackIndex()), imm);
1223    }
1224  } else {
1225    LOG(FATAL) << "Unimplemented";
1226  }
1227}
1228
1229void ParallelMoveResolverX86::Exchange(Register reg, int mem) {
1230  Register suggested_scratch = reg == EAX ? EBX : EAX;
1231  ScratchRegisterScope ensure_scratch(
1232      this, reg, suggested_scratch, codegen_->GetNumberOfCoreRegisters());
1233
1234  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
1235  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset));
1236  __ movl(Address(ESP, mem + stack_offset), reg);
1237  __ movl(reg, static_cast<Register>(ensure_scratch.GetRegister()));
1238}
1239
1240void ParallelMoveResolverX86::Exchange(int mem1, int mem2) {
1241  ScratchRegisterScope ensure_scratch1(
1242      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
1243
1244  Register suggested_scratch = ensure_scratch1.GetRegister() == EAX ? EBX : EAX;
1245  ScratchRegisterScope ensure_scratch2(
1246      this, ensure_scratch1.GetRegister(), suggested_scratch, codegen_->GetNumberOfCoreRegisters());
1247
1248  int stack_offset = ensure_scratch1.IsSpilled() ? kX86WordSize : 0;
1249  stack_offset += ensure_scratch2.IsSpilled() ? kX86WordSize : 0;
1250  __ movl(static_cast<Register>(ensure_scratch1.GetRegister()), Address(ESP, mem1 + stack_offset));
1251  __ movl(static_cast<Register>(ensure_scratch2.GetRegister()), Address(ESP, mem2 + stack_offset));
1252  __ movl(Address(ESP, mem2 + stack_offset), static_cast<Register>(ensure_scratch1.GetRegister()));
1253  __ movl(Address(ESP, mem1 + stack_offset), static_cast<Register>(ensure_scratch2.GetRegister()));
1254}
1255
1256void ParallelMoveResolverX86::EmitSwap(size_t index) {
1257  MoveOperands* move = moves_.Get(index);
1258  Location source = move->GetSource();
1259  Location destination = move->GetDestination();
1260
1261  if (source.IsRegister() && destination.IsRegister()) {
1262    __ xchgl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
1263  } else if (source.IsRegister() && destination.IsStackSlot()) {
1264    Exchange(source.AsX86().AsCpuRegister(), destination.GetStackIndex());
1265  } else if (source.IsStackSlot() && destination.IsRegister()) {
1266    Exchange(destination.AsX86().AsCpuRegister(), source.GetStackIndex());
1267  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
1268    Exchange(destination.GetStackIndex(), source.GetStackIndex());
1269  } else {
1270    LOG(FATAL) << "Unimplemented";
1271  }
1272}
1273
// Spills a scratch register by pushing it on the stack; callers compensate
// for the resulting ESP shift via ScratchRegisterScope::IsSpilled() (see the
// stack_offset adjustments in Exchange/MoveMemoryToMemory above).
void ParallelMoveResolverX86::SpillScratch(int reg) {
  __ pushl(static_cast<Register>(reg));
}
1277
// Restores a previously spilled scratch register by popping it off the stack.
void ParallelMoveResolverX86::RestoreScratch(int reg) {
  __ popl(static_cast<Register>(reg));
}
1281
1282}  // namespace x86
1283}  // namespace art
1284