code_generator_x86.cc revision 3c7bb98698f77af10372cf31824d3bb115d9bf0f
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86.h"
18#include "gc/accounting/card_table.h"
19#include "utils/assembler.h"
20#include "utils/stack_checks.h"
21#include "utils/x86/assembler_x86.h"
22#include "utils/x86/managed_register_x86.h"
23
24#include "entrypoints/quick/quick_entrypoints.h"
25#include "mirror/array.h"
26#include "mirror/art_method.h"
27#include "thread.h"
28
29namespace art {
30
// Returns the managed-register view of this location as an x86 register.
// Only meaningful when the location actually holds a register.
x86::X86ManagedRegister Location::AsX86() const {
  return reg().AsX86();
}
34
35namespace x86 {
36
// Use the implicit (fault-based) stack overflow check by default; the
// explicit compare-and-branch check is kept behind this flag.
static constexpr bool kExplicitStackOverflowCheck = false;

// Only the fake return-address register is accounted for at entry (see
// GenerateFrameEntry), so the entry spill area is one word.
static constexpr int kNumberOfPushedRegistersAtEntry = 1;
// The current ArtMethod* is stored at the bottom of the frame.
static constexpr int kCurrentMethodStackOffset = 0;

// Shorthand for building a Location that wraps a core CPU register.
static Location X86CpuLocation(Register reg) {
  return Location::RegisterLocation(X86ManagedRegister::FromCpuRegister(reg));
}

// Core registers used to pass arguments when calling runtime entrypoints.
static constexpr Register kRuntimeParameterCoreRegisters[] = { EAX, ECX, EDX };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
49
// Calling convention used when calling into quick runtime entrypoints:
// core arguments go in EAX, ECX, EDX (see kRuntimeParameterCoreRegisters).
class InvokeRuntimeCallingConvention : public CallingConvention<Register> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
59
60#define __ reinterpret_cast<X86Assembler*>(codegen->GetAssembler())->
61
// Slow path for HNullCheck: calls the pThrowNullPointer runtime entrypoint,
// which throws and therefore does not return to compiled code.
class NullCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86(uint32_t dex_pc) : dex_pc_(dex_pc) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowNullPointer)));
    // Record the dex pc of the faulting instruction so the runtime can map
    // the call site back to the bytecode.
    codegen->RecordPcInfo(dex_pc_);
  }

 private:
  const uint32_t dex_pc_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86);
};
76
// Slow path for the explicit stack overflow check: tears down this method's
// frame and tail-jumps to the pThrowStackOverflow runtime entrypoint.
class StackOverflowCheckSlowPathX86 : public SlowPathCode {
 public:
  StackOverflowCheckSlowPathX86() {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    // Pop this method's frame so the runtime unwinds from the caller.
    __ addl(ESP,
            Immediate(codegen->GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
    __ fs()->jmp(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowStackOverflow)));
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathX86);
};
91
// Slow path for HBoundsCheck: moves the failing index and the array length
// into the runtime calling-convention registers, then calls the
// pThrowArrayBounds entrypoint (which throws and does not return).
class BoundsCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit BoundsCheckSlowPathX86(uint32_t dex_pc,
                                  Location index_location,
                                  Location length_location)
      : dex_pc_(dex_pc), index_location_(index_location), length_location_(length_location) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = reinterpret_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    // Marshal the entrypoint arguments: arg0 = index, arg1 = length.
    InvokeRuntimeCallingConvention calling_convention;
    x86_codegen->Move32(X86CpuLocation(calling_convention.GetRegisterAt(0)), index_location_);
    x86_codegen->Move32(X86CpuLocation(calling_convention.GetRegisterAt(1)), length_location_);
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowArrayBounds)));
    codegen->RecordPcInfo(dex_pc_);
  }

 private:
  const uint32_t dex_pc_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86);
};
116
117#undef __
118#define __ reinterpret_cast<X86Assembler*>(GetAssembler())->
119
120inline Condition X86Condition(IfCondition cond) {
121  switch (cond) {
122    case kCondEQ: return kEqual;
123    case kCondNE: return kNotEqual;
124    case kCondLT: return kLess;
125    case kCondLE: return kLessEqual;
126    case kCondGT: return kGreater;
127    case kCondGE: return kGreaterEqual;
128    default:
129      LOG(FATAL) << "Unknown if condition";
130  }
131  return kEqual;
132}
133
134void CodeGeneratorX86::DumpCoreRegister(std::ostream& stream, int reg) const {
135  stream << X86ManagedRegister::FromCpuRegister(Register(reg));
136}
137
138void CodeGeneratorX86::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
139  stream << X86ManagedRegister::FromXmmRegister(XmmRegister(reg));
140}
141
// Wires up the three cooperating phases: location building, instruction
// code generation, and parallel-move resolution.
CodeGeneratorX86::CodeGeneratorX86(HGraph* graph)
    : CodeGenerator(graph, kNumberOfRegIds),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this) {}
147
// Bytes occupied by registers pushed at method entry (the fake return
// register only, see GenerateFrameEntry).
size_t CodeGeneratorX86::FrameEntrySpillSize() const {
  return kNumberOfPushedRegistersAtEntry * kX86WordSize;
}
151
152static bool* GetBlockedRegisterPairs(bool* blocked_registers) {
153  return blocked_registers + kNumberOfAllocIds;
154}
155
// Allocates a free register (or register pair for longs) of the requested
// type, keeping the single-register and pair blocked flags consistent so
// that aliasing allocations cannot overlap.
ManagedRegister CodeGeneratorX86::AllocateFreeRegister(Primitive::Type type,
                                                       bool* blocked_registers) const {
  switch (type) {
    case Primitive::kPrimLong: {
      bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);
      size_t reg = AllocateFreeRegisterInternal(blocked_register_pairs, kNumberOfRegisterPairs);
      X86ManagedRegister pair =
          X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      // Block the two single registers making up the chosen pair.
      blocked_registers[pair.AsRegisterPairLow()] = true;
      blocked_registers[pair.AsRegisterPairHigh()] = true;
      // Block all other register pairs that share a register with `pair`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        X86ManagedRegister current =
            X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == pair.AsRegisterPairLow()
            || current.AsRegisterPairLow() == pair.AsRegisterPairHigh()
            || current.AsRegisterPairHigh() == pair.AsRegisterPairLow()
            || current.AsRegisterPairHigh() == pair.AsRegisterPairHigh()) {
          blocked_register_pairs[i] = true;
        }
      }
      return pair;
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register reg = static_cast<Register>(
          AllocateFreeRegisterInternal(blocked_registers, kNumberOfCpuRegisters));
      // Block all register pairs that contain `reg`.
      bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        X86ManagedRegister current =
            X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs[i] = true;
        }
      }
      return X86ManagedRegister::FromCpuRegister(reg);
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << type;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  // Not reached: LOG(FATAL) aborts; this only placates the compiler.
  return ManagedRegister::NoRegister();
}
210
// Marks registers the allocator must never hand out, plus every pair that
// would alias one of them.
void CodeGeneratorX86::SetupBlockedRegisters(bool* blocked_registers) const {
  bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);

  // Don't allocate the dalvik style register pair passing.
  blocked_register_pairs[ECX_EDX] = true;

  // Stack register is always reserved.
  blocked_registers[ESP] = true;

  // TODO: We currently don't use Quick's callee saved registers.
  blocked_registers[EBP] = true;
  blocked_registers[ESI] = true;
  blocked_registers[EDI] = true;
  // Also block every pair containing blocked EDI.
  blocked_register_pairs[EAX_EDI] = true;
  blocked_register_pairs[EDX_EDI] = true;
  blocked_register_pairs[ECX_EDI] = true;
  blocked_register_pairs[EBX_EDI] = true;
}
229
// Total number of register ids known to the allocator on x86.
size_t CodeGeneratorX86::GetNumberOfRegisters() const {
  return kNumberOfRegIds;
}
233
// Caches the assembler so the `__` macro can emit through this visitor.
InstructionCodeGeneratorX86::InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
238
// Emits the method prologue: optional stack overflow check, frame
// allocation, and spilling the current method (arriving in EAX) to the
// bottom of the frame where LoadCurrentMethod reads it back.
void CodeGeneratorX86::GenerateFrameEntry() {
  // Create a fake register to mimic Quick.
  static const int kFakeReturnRegister = 8;
  core_spill_mask_ |= (1 << kFakeReturnRegister);

  bool skip_overflow_check = IsLeafMethod() && !IsLargeFrame(GetFrameSize(), InstructionSet::kX86);
  if (!skip_overflow_check && !kExplicitStackOverflowCheck) {
    // Implicit check: probe below ESP within the reserved guard area. If the
    // stack has overflowed this load faults, and the fault is turned into a
    // StackOverflowError; the pc recorded here maps the fault to dex pc 0.
    __ testl(EAX, Address(ESP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86))));
    RecordPcInfo(0);
  }

  // The return PC has already been pushed on the stack.
  __ subl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));

  if (!skip_overflow_check && kExplicitStackOverflowCheck) {
    // Explicit check: compare ESP against the thread's stack end and branch
    // to the slow path when the new frame does not fit.
    SlowPathCode* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathX86();
    AddSlowPath(slow_path);

    __ fs()->cmpl(ESP, Address::Absolute(Thread::StackEndOffset<kX86WordSize>()));
    __ j(kLess, slow_path->GetEntryLabel());
  }

  __ movl(Address(ESP, kCurrentMethodStackOffset), EAX);
}
263
// Emits the epilogue: releases the frame allocated by GenerateFrameEntry
// (the matching `ret` is emitted by the return visitors).
void CodeGeneratorX86::GenerateFrameExit() {
  __ addl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
}
267
// Binds `label` to the current assembler position.
void CodeGeneratorX86::Bind(Label* label) {
  __ Bind(label);
}
271
// Loads the current method from its home slot at the bottom of the frame
// (stored there by GenerateFrameEntry).
void InstructionCodeGeneratorX86::LoadCurrentMethod(Register reg) {
  __ movl(reg, Address(ESP, kCurrentMethodStackOffset));
}
275
276Location CodeGeneratorX86::GetStackLocation(HLoadLocal* load) const {
277  switch (load->GetType()) {
278    case Primitive::kPrimLong:
279      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
280      break;
281
282    case Primitive::kPrimInt:
283    case Primitive::kPrimNot:
284      return Location::StackSlot(GetStackSlot(load->GetLocal()));
285
286    case Primitive::kPrimFloat:
287    case Primitive::kPrimDouble:
288      LOG(FATAL) << "Unimplemented type " << load->GetType();
289
290    case Primitive::kPrimBoolean:
291    case Primitive::kPrimByte:
292    case Primitive::kPrimChar:
293    case Primitive::kPrimShort:
294    case Primitive::kPrimVoid:
295      LOG(FATAL) << "Unexpected type " << load->GetType();
296  }
297
298  LOG(FATAL) << "Unreachable";
299  return Location();
300}
301
// Assigns the location of the next method argument according to the dex
// calling convention: core arguments fill the convention registers first,
// then spill to the caller's stack. Longs consume two consecutive units and
// may be split between the last register and the stack (QuickParameter).
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return X86CpuLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      gp_index_ += 2;  // A long takes two argument units.
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        // Both halves fit in registers.
        return Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(
            calling_convention.GetRegisterPairAt(index)));
      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
        // Split: low half in the last register, high half on the stack.
        return Location::QuickParameter(index);
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented parameter type " << type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
342
// Emits a 32-bit move between any combination of register and stack slot.
void CodeGeneratorX86::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
    } else {
      DCHECK(source.IsStackSlot());
      __ movl(destination.AsX86().AsCpuRegister(), Address(ESP, source.GetStackIndex()));
    }
  } else {
    if (source.IsRegister()) {
      __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsCpuRegister());
    } else {
      DCHECK(source.IsStackSlot());
      // Stack-to-stack move via push/pop avoids needing a scratch register.
      __ pushl(Address(ESP, source.GetStackIndex()));
      __ popl(Address(ESP, destination.GetStackIndex()));
    }
  }
}
364
// Emits a 64-bit move between register pairs, double stack slots, and the
// split "quick parameter" form where the low half is in a convention
// register and the high half lives on the caller's stack.
void CodeGeneratorX86::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movl(destination.AsX86().AsRegisterPairLow(), source.AsX86().AsRegisterPairLow());
      __ movl(destination.AsX86().AsRegisterPairHigh(), source.AsX86().AsRegisterPairHigh());
    } else if (source.IsQuickParameter()) {
      uint32_t argument_index = source.GetQuickParameterIndex();
      InvokeDexCallingConvention calling_convention;
      // Low half comes from the convention register; the high half is read
      // from the caller's frame (hence the GetFrameSize() adjustment).
      __ movl(destination.AsX86().AsRegisterPairLow(),
              calling_convention.GetRegisterAt(argument_index));
      __ movl(destination.AsX86().AsRegisterPairHigh(), Address(ESP,
          calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movl(destination.AsX86().AsRegisterPairLow(), Address(ESP, source.GetStackIndex()));
      __ movl(destination.AsX86().AsRegisterPairHigh(),
              Address(ESP, source.GetHighStackIndex(kX86WordSize)));
    }
  } else if (destination.IsQuickParameter()) {
    InvokeDexCallingConvention calling_convention;
    uint32_t argument_index = destination.GetQuickParameterIndex();
    if (source.IsRegister()) {
      __ movl(calling_convention.GetRegisterAt(argument_index), source.AsX86().AsRegisterPairLow());
      __ movl(Address(ESP, calling_convention.GetStackOffsetOf(argument_index + 1)),
              source.AsX86().AsRegisterPairHigh());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movl(calling_convention.GetRegisterAt(argument_index),
              Address(ESP, source.GetStackIndex()));
      // High half moved stack-to-stack via push/pop (no scratch register).
      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
      __ popl(Address(ESP, calling_convention.GetStackOffsetOf(argument_index + 1)));
    }
  } else {
    if (source.IsRegister()) {
      __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsRegisterPairLow());
      __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
              source.AsX86().AsRegisterPairHigh());
    } else if (source.IsQuickParameter()) {
      InvokeDexCallingConvention calling_convention;
      uint32_t argument_index = source.GetQuickParameterIndex();
      __ movl(Address(ESP, destination.GetStackIndex()),
              calling_convention.GetRegisterAt(argument_index));
      // High half comes from the caller's frame.
      __ pushl(Address(ESP,
          calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize()));
      __ popl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack: move both halves via push/pop.
      __ pushl(Address(ESP, source.GetStackIndex()));
      __ popl(Address(ESP, destination.GetStackIndex()));
      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
      __ popl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)));
    }
  }
}
422
// Moves the value produced by `instruction` into `location`, materializing
// int/long constants inline and reading locals from their stack slots;
// otherwise the value is taken from the instruction's output location.
void CodeGeneratorX86::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  if (instruction->AsIntConstant() != nullptr) {
    Immediate imm(instruction->AsIntConstant()->GetValue());
    if (location.IsRegister()) {
      __ movl(location.AsX86().AsCpuRegister(), imm);
    } else {
      __ movl(Address(ESP, location.GetStackIndex()), imm);
    }
  } else if (instruction->AsLongConstant() != nullptr) {
    // Long constants are emitted as two 32-bit immediates.
    int64_t value = instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      __ movl(location.AsX86().AsRegisterPairLow(), Immediate(Low32Bits(value)));
      __ movl(location.AsX86().AsRegisterPairHigh(), Immediate(High32Bits(value)));
    } else {
      __ movl(Address(ESP, location.GetStackIndex()), Immediate(Low32Bits(value)));
      __ movl(Address(ESP, location.GetHighStackIndex(kX86WordSize)), Immediate(High32Bits(value)));
    }
  } else if (instruction->AsLoadLocal() != nullptr) {
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        Move32(location, Location::StackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      case Primitive::kPrimLong:
        Move64(location, Location::DoubleStackSlot(
            GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      default:
        LOG(FATAL) << "Unimplemented local type " << instruction->GetType();
    }
  } else {
    // General case: the producer must immediately precede the consumer (or
    // be followed only by temporaries) for its output location to be live.
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        Move32(location, instruction->GetLocations()->Out());
        break;

      case Primitive::kPrimLong:
        Move64(location, instruction->GetLocations()->Out());
        break;

      default:
        LOG(FATAL) << "Unimplemented type " << instruction->GetType();
    }
  }
}
480
// A goto needs no operands, hence no location summary.
void LocationsBuilderX86::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
484
485void InstructionCodeGeneratorX86::VisitGoto(HGoto* got) {
486  HBasicBlock* successor = got->GetSuccessor();
487  if (GetGraph()->GetExitBlock() == successor) {
488    codegen_->GenerateFrameExit();
489  } else if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
490    __ jmp(codegen_->GetLabelOf(successor));
491  }
492}
493
// The exit block needs no operands, hence no location summary.
void LocationsBuilderX86::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
497
498void InstructionCodeGeneratorX86::VisitExit(HExit* exit) {
499  if (kIsDebugBuild) {
500    __ Comment("Unreachable");
501    __ int3();
502  }
503}
504
// An HIf only needs an input location when its condition was materialized;
// otherwise the compare is emitted inline by the codegen visitor.
void LocationsBuilderX86::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  HCondition* condition = cond->AsCondition();
  if (condition->NeedsMaterialization()) {
    locations->SetInAt(0, Location::Any());
  }
  if_instr->SetLocations(locations);
}
515
516void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
517  HInstruction* cond = if_instr->InputAt(0);
518  DCHECK(cond->IsCondition());
519  HCondition* condition = cond->AsCondition();
520  if (condition->NeedsMaterialization()) {
521    // Materialized condition, compare against 0
522    Location lhs = if_instr->GetLocations()->InAt(0);
523    if (lhs.IsRegister()) {
524      __ cmpl(lhs.AsX86().AsCpuRegister(), Immediate(0));
525    } else {
526      __ cmpl(Address(ESP, lhs.GetStackIndex()), Immediate(0));
527    }
528    __ j(kEqual,  codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
529  } else {
530    Location lhs = condition->GetLocations()->InAt(0);
531    Location rhs = condition->GetLocations()->InAt(1);
532    // LHS is guaranteed to be in a register (see LocationsBuilderX86::VisitCondition).
533    if (rhs.IsRegister()) {
534      __ cmpl(lhs.AsX86().AsCpuRegister(), rhs.AsX86().AsCpuRegister());
535    } else if (rhs.IsConstant()) {
536      HIntConstant* instruction = rhs.GetConstant()->AsIntConstant();
537      Immediate imm(instruction->AsIntConstant()->GetValue());
538      __ cmpl(lhs.AsX86().AsCpuRegister(), imm);
539    } else {
540      __ cmpl(lhs.AsX86().AsCpuRegister(), Address(ESP, rhs.GetStackIndex()));
541    }
542    __ j(X86Condition(condition->GetCondition()),
543         codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
544  }
545  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
546    __ jmp(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
547  }
548}
549
// Locals are pure bookkeeping; no operands needed.
void LocationsBuilderX86::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}
553
// No code is generated for a local; just sanity-check its placement.
void InstructionCodeGeneratorX86::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}
557
// Loading a local needs no operands; consumers read the slot directly.
void LocationsBuilderX86::VisitLoadLocal(HLoadLocal* local) {
  local->SetLocations(nullptr);
}
561
void InstructionCodeGeneratorX86::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
}
565
// Pins the stored value's location to the local's own stack slot, so the
// producing instruction writes the local directly and the store itself
// generates no code.
void LocationsBuilderX86::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << store->InputAt(1)->GetType();
  }
  store->SetLocations(locations);
}
587
void InstructionCodeGeneratorX86::VisitStoreLocal(HStoreLocal* store) {
  // Nothing to do: the input was allocated in the local's stack slot (see
  // LocationsBuilderX86::VisitStoreLocal), so the value is already there.
}
590
// A compare requires its first input in a register; the second can be
// anywhere. An output register is only needed when the 0/1 result must be
// materialized.
void LocationsBuilderX86::VisitCondition(HCondition* comp) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(comp);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::Any());
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister());
  }
  comp->SetLocations(locations);
}
600
// When the condition must be materialized, emit the compare and capture the
// outcome as a 0/1 byte via setcc; otherwise VisitIf emits the compare.
void InstructionCodeGeneratorX86::VisitCondition(HCondition* comp) {
  if (comp->NeedsMaterialization()) {
    LocationSummary* locations = comp->GetLocations();
    if (locations->InAt(1).IsRegister()) {
      __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(),
              locations->InAt(1).AsX86().AsCpuRegister());
    } else if (locations->InAt(1).IsConstant()) {
      HConstant* instruction = locations->InAt(1).GetConstant();
      Immediate imm(instruction->AsIntConstant()->GetValue());
      __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(), imm);
    } else {
      __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(),
              Address(ESP, locations->InAt(1).GetStackIndex()));
    }
    // setcc writes 1 into the output register when the condition holds.
    __ setb(X86Condition(comp->GetCondition()), locations->Out().AsX86().AsCpuRegister());
  }
}
618
// The concrete comparison nodes below all delegate to the generic
// HCondition handling in VisitCondition.
void LocationsBuilderX86::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
666
// Constants live in a constant location; no register is reserved.
void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
  constant->SetLocations(locations);
}
672
void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
}
675
// Constants live in a constant location; no register pair is reserved.
void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
  constant->SetLocations(locations);
}
681
void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
}
685
// A void return has no operand, hence no location summary.
void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
689
// Tears down the frame and returns to the caller.
void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret) {
  codegen_->GenerateFrameExit();
  __ ret();
}
694
// Return values follow the x86 convention used here: EAX for 32-bit core
// values and the EAX:EDX pair for longs.
void LocationsBuilderX86::VisitReturn(HReturn* ret) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(0, X86CpuLocation(EAX));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(
          0, Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(EAX_EDX)));
      break;

    default:
      LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
  }
  ret->SetLocations(locations);
}
717
// Verifies (in debug builds) that the return value already sits in the
// convention registers set up by the locations builder, then emits the
// frame teardown and the `ret`.
void InstructionCodeGeneratorX86::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86().AsCpuRegister(), EAX);
        break;

      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86().AsRegisterPair(), EAX_EDX);
        break;

      default:
        LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
  __ ret();
}
741
// Sets up a static call: EAX is reserved as a temp for the resolved method,
// arguments follow the dex calling convention, and the result lands in EAX
// (or the EAX:EDX pair for longs).
void LocationsBuilderX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  codegen_->MarkNotLeaf();
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(invoke);
  locations->AddTemp(X86CpuLocation(EAX));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  switch (invoke->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetOut(X86CpuLocation(EAX));
      break;

    case Primitive::kPrimLong:
      locations->SetOut(Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(EAX_EDX)));
      break;

    case Primitive::kPrimVoid:
      break;

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented return type " << invoke->GetType();
      break;
  }

  invoke->SetLocations(locations);
}
778
// Emits a static call: resolves the callee through the caller's dex cache
// and calls its quick-compiled entry point.
void InstructionCodeGeneratorX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).AsX86().AsCpuRegister();
  uint32_t heap_reference_size = sizeof(mirror::HeapReference<mirror::Object>);
  // Byte offset of the callee's slot inside the dex cache array.
  size_t index_in_cache = mirror::Array::DataOffset(heap_reference_size).Int32Value() +
      invoke->GetIndexInDexCache() * kX86WordSize;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ movl(temp, Address(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()));
  // temp = temp[index_in_cache]
  __ movl(temp, Address(temp, index_in_cache));
  // (temp + offset_of_quick_compiled_code)()
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value()));

  DCHECK(!codegen_->IsLeafMethod());
  // Record the pc of the call so the runtime can map it to the dex pc.
  codegen_->RecordPcInfo(invoke->GetDexPc());
}
804
// Additions are two-address on x86: the first input doubles as the output,
// and the second input may come from anywhere.
void LocationsBuilderX86::VisitAdd(HAdd* add) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(add);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
  add->SetLocations(locations);
}
828
// Emits integer addition. 64-bit adds use an addl/adcl pair so the carry of
// the low word propagates into the high word.
void InstructionCodeGeneratorX86::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      // Two-address form: input 0 must alias the output.
      DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(),
                locations->Out().AsX86().AsCpuRegister());
      if (locations->InAt(1).IsRegister()) {
        __ addl(locations->InAt(0).AsX86().AsCpuRegister(),
                locations->InAt(1).AsX86().AsCpuRegister());
      } else if (locations->InAt(1).IsConstant()) {
        HConstant* instruction = locations->InAt(1).GetConstant();
        Immediate imm(instruction->AsIntConstant()->GetValue());
        __ addl(locations->InAt(0).AsX86().AsCpuRegister(), imm);
      } else {
        __ addl(locations->InAt(0).AsX86().AsCpuRegister(),
                Address(ESP, locations->InAt(1).GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      DCHECK_EQ(locations->InAt(0).AsX86().AsRegisterPair(),
                locations->Out().AsX86().AsRegisterPair());
      if (locations->InAt(1).IsRegister()) {
        __ addl(locations->InAt(0).AsX86().AsRegisterPairLow(),
                locations->InAt(1).AsX86().AsRegisterPairLow());
        __ adcl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
                locations->InAt(1).AsX86().AsRegisterPairHigh());
      } else {
        __ addl(locations->InAt(0).AsX86().AsRegisterPairLow(),
                Address(ESP, locations->InAt(1).GetStackIndex()));
        __ adcl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
                Address(ESP, locations->InAt(1).GetHighStackIndex(kX86WordSize)));
      }
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
}
877
878void LocationsBuilderX86::VisitSub(HSub* sub) {
879  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(sub);
880  switch (sub->GetResultType()) {
881    case Primitive::kPrimInt:
882    case Primitive::kPrimLong: {
883      locations->SetInAt(0, Location::RequiresRegister());
884      locations->SetInAt(1, Location::Any());
885      locations->SetOut(Location::SameAsFirstInput());
886      break;
887    }
888
889    case Primitive::kPrimBoolean:
890    case Primitive::kPrimByte:
891    case Primitive::kPrimChar:
892    case Primitive::kPrimShort:
893      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
894      break;
895
896    default:
897      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
898  }
899  sub->SetLocations(locations);
900}
901
902void InstructionCodeGeneratorX86::VisitSub(HSub* sub) {
903  LocationSummary* locations = sub->GetLocations();
904  switch (sub->GetResultType()) {
905    case Primitive::kPrimInt: {
906      DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(),
907                locations->Out().AsX86().AsCpuRegister());
908      if (locations->InAt(1).IsRegister()) {
909        __ subl(locations->InAt(0).AsX86().AsCpuRegister(),
910                locations->InAt(1).AsX86().AsCpuRegister());
911      } else if (locations->InAt(1).IsConstant()) {
912        HConstant* instruction = locations->InAt(1).GetConstant();
913        Immediate imm(instruction->AsIntConstant()->GetValue());
914        __ subl(locations->InAt(0).AsX86().AsCpuRegister(), imm);
915      } else {
916        __ subl(locations->InAt(0).AsX86().AsCpuRegister(),
917                Address(ESP, locations->InAt(1).GetStackIndex()));
918      }
919      break;
920    }
921
922    case Primitive::kPrimLong: {
923      DCHECK_EQ(locations->InAt(0).AsX86().AsRegisterPair(),
924                locations->Out().AsX86().AsRegisterPair());
925      if (locations->InAt(1).IsRegister()) {
926        __ subl(locations->InAt(0).AsX86().AsRegisterPairLow(),
927                locations->InAt(1).AsX86().AsRegisterPairLow());
928        __ sbbl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
929                locations->InAt(1).AsX86().AsRegisterPairHigh());
930      } else {
931        __ subl(locations->InAt(0).AsX86().AsRegisterPairLow(),
932                Address(ESP, locations->InAt(1).GetStackIndex()));
933        __ sbbl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
934                Address(ESP, locations->InAt(1).GetHighStackIndex(kX86WordSize)));
935      }
936      break;
937    }
938
939    case Primitive::kPrimBoolean:
940    case Primitive::kPrimByte:
941    case Primitive::kPrimChar:
942    case Primitive::kPrimShort:
943      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
944      break;
945
946    default:
947      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
948  }
949}
950
951void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
952  codegen_->MarkNotLeaf();
953  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
954  locations->SetOut(X86CpuLocation(EAX));
955  InvokeRuntimeCallingConvention calling_convention;
956  locations->AddTemp(X86CpuLocation(calling_convention.GetRegisterAt(0)));
957  locations->AddTemp(X86CpuLocation(calling_convention.GetRegisterAt(1)));
958  instruction->SetLocations(locations);
959}
960
void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
  // Allocate a new object via the pAllocObjectWithAccessCheck runtime
  // entrypoint: arg0 = type index, arg1 = the referring (current) method.
  InvokeRuntimeCallingConvention calling_convention;
  LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));

  // Runtime entrypoints are reached through the thread object (fs segment).
  __ fs()->call(
      Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocObjectWithAccessCheck)));

  // Record a pc -> dex-pc mapping for this runtime call site.
  codegen_->RecordPcInfo(instruction->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}
972
973void LocationsBuilderX86::VisitParameterValue(HParameterValue* instruction) {
974  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
975  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
976  if (location.IsStackSlot()) {
977    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
978  } else if (location.IsDoubleStackSlot()) {
979    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
980  }
981  locations->SetOut(location);
982  instruction->SetLocations(locations);
983}
984
void InstructionCodeGeneratorX86::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to emit: the parameter is already at the location computed by
  // the locations builder (register or caller-frame stack slot).
}
987
988void LocationsBuilderX86::VisitNot(HNot* instruction) {
989  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
990  locations->SetInAt(0, Location::RequiresRegister());
991  locations->SetOut(Location::SameAsFirstInput());
992  instruction->SetLocations(locations);
993}
994
995void InstructionCodeGeneratorX86::VisitNot(HNot* instruction) {
996  LocationSummary* locations = instruction->GetLocations();
997  Location out = locations->Out();
998  DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(), out.AsX86().AsCpuRegister());
999  __ xorl(out.AsX86().AsCpuRegister(), Immediate(1));
1000}
1001
1002void LocationsBuilderX86::VisitCompare(HCompare* compare) {
1003  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare);
1004  locations->SetInAt(0, Location::RequiresRegister());
1005  locations->SetInAt(1, Location::Any());
1006  locations->SetOut(Location::RequiresRegister());
1007  compare->SetLocations(locations);
1008}
1009
1010void InstructionCodeGeneratorX86::VisitCompare(HCompare* compare) {
1011  Label greater, done;
1012  LocationSummary* locations = compare->GetLocations();
1013  switch (compare->InputAt(0)->GetType()) {
1014    case Primitive::kPrimLong: {
1015      Label less, greater, done;
1016      Register output = locations->Out().AsX86().AsCpuRegister();
1017      X86ManagedRegister left = locations->InAt(0).AsX86();
1018      Location right = locations->InAt(1);
1019      if (right.IsRegister()) {
1020        __ cmpl(left.AsRegisterPairHigh(), right.AsX86().AsRegisterPairHigh());
1021      } else {
1022        DCHECK(right.IsDoubleStackSlot());
1023        __ cmpl(left.AsRegisterPairHigh(), Address(ESP, right.GetHighStackIndex(kX86WordSize)));
1024      }
1025      __ j(kLess, &less);  // Signed compare.
1026      __ j(kGreater, &greater);  // Signed compare.
1027      if (right.IsRegister()) {
1028        __ cmpl(left.AsRegisterPairLow(), right.AsX86().AsRegisterPairLow());
1029      } else {
1030        DCHECK(right.IsDoubleStackSlot());
1031        __ cmpl(left.AsRegisterPairLow(), Address(ESP, right.GetStackIndex()));
1032      }
1033      __ movl(output, Immediate(0));
1034      __ j(kEqual, &done);
1035      __ j(kBelow, &less);  // Unsigned compare.
1036
1037      __ Bind(&greater);
1038      __ movl(output, Immediate(1));
1039      __ jmp(&done);
1040
1041      __ Bind(&less);
1042      __ movl(output, Immediate(-1));
1043
1044      __ Bind(&done);
1045      break;
1046    }
1047    default:
1048      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
1049  }
1050}
1051
1052void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
1053  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1054  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
1055    locations->SetInAt(i, Location::Any());
1056  }
1057  locations->SetOut(Location::Any());
1058  instruction->SetLocations(locations);
1059}
1060
void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction) {
  // No code is ever generated for a phi; reaching this visitor is a bug.
  LOG(FATAL) << "Unreachable";
}
1064
1065void LocationsBuilderX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
1066  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1067  locations->SetInAt(0, Location::RequiresRegister());
1068  Primitive::Type field_type = instruction->InputAt(1)->GetType();
1069  if (field_type == Primitive::kPrimBoolean || field_type == Primitive::kPrimByte) {
1070    // Ensure the value is in a byte register.
1071    locations->SetInAt(1, X86CpuLocation(EAX));
1072  } else {
1073    locations->SetInAt(1, Location::RequiresRegister());
1074  }
1075  // Temporary registers for the write barrier.
1076  if (field_type == Primitive::kPrimNot) {
1077    locations->AddTemp(Location::RequiresRegister());
1078    // Ensure the card is in a byte register.
1079    locations->AddTemp(X86CpuLocation(ECX));
1080  }
1081  instruction->SetLocations(locations);
1082}
1083
void InstructionCodeGeneratorX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Store input 1 into the field at GetFieldOffset() of the object in
  // input 0, with a store width chosen by the field type. Reference stores
  // also emit the GC write barrier (MarkGCCard).
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsX86().AsCpuRegister();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
  Primitive::Type field_type = instruction->InputAt(1)->GetType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      // 8-bit store; the locations builder pinned the value to a byte register.
      ByteRegister value = locations->InAt(1).AsX86().AsByteRegister();
      __ movb(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      // 16-bit store.
      Register value = locations->InAt(1).AsX86().AsCpuRegister();
      __ movw(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // 32-bit store.
      Register value = locations->InAt(1).AsX86().AsCpuRegister();
      __ movl(Address(obj, offset), value);

      if (field_type == Primitive::kPrimNot) {
        // Write barrier: dirty the card covering `obj`.
        Register temp = locations->GetTemp(0).AsX86().AsCpuRegister();
        Register card = locations->GetTemp(1).AsX86().AsCpuRegister();
        codegen_->MarkGCCard(temp, card, obj, value);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit store as two 32-bit moves, low word first.
      X86ManagedRegister value = locations->InAt(1).AsX86();
      __ movl(Address(obj, offset), value.AsRegisterPairLow());
      __ movl(Address(obj, kX86WordSize + offset), value.AsRegisterPairHigh());
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      // LOG(FATAL) aborts, so the fall-through into the next case is dead.
      LOG(FATAL) << "Unimplemented register type " << field_type;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
  }
}
1133
void CodeGeneratorX86::MarkGCCard(Register temp, Register card, Register object, Register value) {
  // GC write barrier: after a reference store into `object`, mark the card
  // table entry covering `object` so the collector rescans it. Null stores
  // are skipped.
  Label is_null;
  __ testl(value, value);
  __ j(kEqual, &is_null);
  // card = thread-local card table base, read through the fs segment.
  __ fs()->movl(card, Address::Absolute(Thread::CardTableOffset<kX86WordSize>().Int32Value()));
  __ movl(temp, object);
  __ shrl(temp, Immediate(gc::accounting::CardTable::kCardShift));
  // card_table[object >> kCardShift] = low byte of the card table address
  // (the byte register aliasing `card`).
  __ movb(Address(temp, card, TIMES_1, 0),
          X86ManagedRegister::FromCpuRegister(card).AsByteRegister());
  __ Bind(&is_null);
}
1145
1146void LocationsBuilderX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
1147  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1148  locations->SetInAt(0, Location::RequiresRegister());
1149  locations->SetOut(Location::RequiresRegister());
1150  instruction->SetLocations(locations);
1151}
1152
void InstructionCodeGeneratorX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Load the field at GetFieldOffset() of the object in input 0, widening
  // sub-word values to 32 bits with the extension matching the field type.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsX86().AsCpuRegister();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      // Zero-extend the unsigned 8-bit value.
      Register out = locations->Out().AsX86().AsCpuRegister();
      __ movzxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimByte: {
      // Sign-extend the signed 8-bit value.
      Register out = locations->Out().AsX86().AsCpuRegister();
      __ movsxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimShort: {
      // Sign-extend the signed 16-bit value.
      Register out = locations->Out().AsX86().AsCpuRegister();
      __ movsxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimChar: {
      // Zero-extend the unsigned 16-bit value.
      Register out = locations->Out().AsX86().AsCpuRegister();
      __ movzxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // Plain 32-bit load.
      Register out = locations->Out().AsX86().AsCpuRegister();
      __ movl(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit load as two 32-bit moves, low word first.
      // TODO: support volatile.
      X86ManagedRegister out = locations->Out().AsX86();
      __ movl(out.AsRegisterPairLow(), Address(obj, offset));
      __ movl(out.AsRegisterPairHigh(), Address(obj, kX86WordSize + offset));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      // LOG(FATAL) aborts, so the fall-through into the next case is dead.
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
  }
}
1206
1207void LocationsBuilderX86::VisitNullCheck(HNullCheck* instruction) {
1208  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1209  locations->SetInAt(0, Location::Any());
1210  // TODO: Have a normalization phase that makes this instruction never used.
1211  locations->SetOut(Location::SameAsFirstInput());
1212  instruction->SetLocations(locations);
1213}
1214
1215void InstructionCodeGeneratorX86::VisitNullCheck(HNullCheck* instruction) {
1216  SlowPathCode* slow_path =
1217      new (GetGraph()->GetArena()) NullCheckSlowPathX86(instruction->GetDexPc());
1218  codegen_->AddSlowPath(slow_path);
1219
1220  LocationSummary* locations = instruction->GetLocations();
1221  Location obj = locations->InAt(0);
1222  DCHECK(obj.Equals(locations->Out()));
1223
1224  if (obj.IsRegister()) {
1225    __ cmpl(obj.AsX86().AsCpuRegister(), Immediate(0));
1226  } else {
1227    DCHECK(locations->InAt(0).IsStackSlot());
1228    __ cmpl(Address(ESP, obj.GetStackIndex()), Immediate(0));
1229  }
1230  __ j(kEqual, slow_path->GetEntryLabel());
1231}
1232
1233void LocationsBuilderX86::VisitArrayGet(HArrayGet* instruction) {
1234  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1235  locations->SetInAt(0, Location::RequiresRegister());
1236  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1237  locations->SetOut(Location::RequiresRegister());
1238  instruction->SetLocations(locations);
1239}
1240
void InstructionCodeGeneratorX86::VisitArrayGet(HArrayGet* instruction) {
  // Load an array element: address = obj + data_offset + index * element_size.
  // A constant index is folded into the displacement; otherwise a scaled
  // index addressing mode is used. Sub-word elements are widened to 32 bits
  // with the extension matching the element type.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsX86().AsCpuRegister();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      // Unsigned 8-bit element: zero-extend.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().AsX86().AsCpuRegister();
      if (index.IsConstant()) {
        __ movzxb(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
      } else {
        __ movzxb(out, Address(obj, index.AsX86().AsCpuRegister(), TIMES_1, data_offset));
      }
      break;
    }

    case Primitive::kPrimByte: {
      // Signed 8-bit element: sign-extend.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().AsX86().AsCpuRegister();
      if (index.IsConstant()) {
        __ movsxb(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
      } else {
        __ movsxb(out, Address(obj, index.AsX86().AsCpuRegister(), TIMES_1, data_offset));
      }
      break;
    }

    case Primitive::kPrimShort: {
      // Signed 16-bit element: sign-extend.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().AsX86().AsCpuRegister();
      if (index.IsConstant()) {
        __ movsxw(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
      } else {
        __ movsxw(out, Address(obj, index.AsX86().AsCpuRegister(), TIMES_2, data_offset));
      }
      break;
    }

    case Primitive::kPrimChar: {
      // Unsigned 16-bit element: zero-extend.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().AsX86().AsCpuRegister();
      if (index.IsConstant()) {
        __ movzxw(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
      } else {
        __ movzxw(out, Address(obj, index.AsX86().AsCpuRegister(), TIMES_2, data_offset));
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // 32-bit element: plain load.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().AsX86().AsCpuRegister();
      if (index.IsConstant()) {
        __ movl(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
      } else {
        __ movl(out, Address(obj, index.AsX86().AsCpuRegister(), TIMES_4, data_offset));
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit element: two 32-bit loads, low word first.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      X86ManagedRegister out = locations->Out().AsX86();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ movl(out.AsRegisterPairLow(), Address(obj, offset));
        __ movl(out.AsRegisterPairHigh(), Address(obj, offset + kX86WordSize));
      } else {
        __ movl(out.AsRegisterPairLow(),
                Address(obj, index.AsX86().AsCpuRegister(), TIMES_8, data_offset));
        __ movl(out.AsRegisterPairHigh(),
                Address(obj, index.AsX86().AsCpuRegister(), TIMES_8, data_offset + kX86WordSize));
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      // LOG(FATAL) aborts, so the fall-through into the next case is dead.
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
  }
}
1332
1333void LocationsBuilderX86::VisitArraySet(HArraySet* instruction) {
1334  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1335  Primitive::Type value_type = instruction->InputAt(2)->GetType();
1336  if (value_type == Primitive::kPrimNot) {
1337    InvokeRuntimeCallingConvention calling_convention;
1338    locations->SetInAt(0, X86CpuLocation(calling_convention.GetRegisterAt(0)));
1339    locations->SetInAt(1, X86CpuLocation(calling_convention.GetRegisterAt(1)));
1340    locations->SetInAt(2, X86CpuLocation(calling_convention.GetRegisterAt(2)));
1341    codegen_->MarkNotLeaf();
1342  } else {
1343    locations->SetInAt(0, Location::RequiresRegister());
1344    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1345    if (value_type == Primitive::kPrimBoolean || value_type == Primitive::kPrimByte) {
1346      // Ensure the value is in a byte register.
1347      locations->SetInAt(2, X86CpuLocation(EAX));
1348    } else {
1349      locations->SetInAt(2, Location::RequiresRegister());
1350    }
1351  }
1352
1353  instruction->SetLocations(locations);
1354}
1355
1356void InstructionCodeGeneratorX86::VisitArraySet(HArraySet* instruction) {
1357  LocationSummary* locations = instruction->GetLocations();
1358  Register obj = locations->InAt(0).AsX86().AsCpuRegister();
1359  Location index = locations->InAt(1);
1360  Primitive::Type value_type = instruction->InputAt(2)->GetType();
1361
1362  switch (value_type) {
1363    case Primitive::kPrimBoolean:
1364    case Primitive::kPrimByte: {
1365      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
1366      ByteRegister value = locations->InAt(2).AsX86().AsByteRegister();
1367      if (index.IsConstant()) {
1368        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
1369        __ movb(Address(obj, offset), value);
1370      } else {
1371        __ movb(Address(obj, index.AsX86().AsCpuRegister(), TIMES_1, data_offset), value);
1372      }
1373      break;
1374    }
1375
1376    case Primitive::kPrimShort:
1377    case Primitive::kPrimChar: {
1378      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
1379      Register value = locations->InAt(2).AsX86().AsCpuRegister();
1380      if (index.IsConstant()) {
1381        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
1382        __ movw(Address(obj, offset), value);
1383      } else {
1384        __ movw(Address(obj, index.AsX86().AsCpuRegister(), TIMES_2, data_offset), value);
1385      }
1386      break;
1387    }
1388
1389    case Primitive::kPrimInt: {
1390      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
1391      Register value = locations->InAt(2).AsX86().AsCpuRegister();
1392      if (index.IsConstant()) {
1393        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
1394        __ movl(Address(obj, offset), value);
1395      } else {
1396        __ movl(Address(obj, index.AsX86().AsCpuRegister(), TIMES_4, data_offset), value);
1397      }
1398      break;
1399    }
1400
1401    case Primitive::kPrimNot: {
1402      DCHECK(!codegen_->IsLeafMethod());
1403      __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAputObject)));
1404      codegen_->RecordPcInfo(instruction->GetDexPc());
1405      break;
1406    }
1407
1408    case Primitive::kPrimLong: {
1409      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
1410      X86ManagedRegister value = locations->InAt(2).AsX86();
1411      if (index.IsConstant()) {
1412        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
1413        __ movl(Address(obj, offset), value.AsRegisterPairLow());
1414        __ movl(Address(obj, offset + kX86WordSize), value.AsRegisterPairHigh());
1415      } else {
1416        __ movl(Address(obj, index.AsX86().AsCpuRegister(), TIMES_8, data_offset),
1417                value.AsRegisterPairLow());
1418        __ movl(Address(obj, index.AsX86().AsCpuRegister(), TIMES_8, data_offset + kX86WordSize),
1419                value.AsRegisterPairHigh());
1420      }
1421      break;
1422    }
1423
1424    case Primitive::kPrimFloat:
1425    case Primitive::kPrimDouble:
1426      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
1427
1428    case Primitive::kPrimVoid:
1429      LOG(FATAL) << "Unreachable type " << instruction->GetType();
1430  }
1431}
1432
1433void LocationsBuilderX86::VisitArrayLength(HArrayLength* instruction) {
1434  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1435  locations->SetInAt(0, Location::RequiresRegister());
1436  locations->SetOut(Location::RequiresRegister());
1437  instruction->SetLocations(locations);
1438}
1439
1440void InstructionCodeGeneratorX86::VisitArrayLength(HArrayLength* instruction) {
1441  LocationSummary* locations = instruction->GetLocations();
1442  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
1443  Register obj = locations->InAt(0).AsX86().AsCpuRegister();
1444  Register out = locations->Out().AsX86().AsCpuRegister();
1445  __ movl(out, Address(obj, offset));
1446}
1447
1448void LocationsBuilderX86::VisitBoundsCheck(HBoundsCheck* instruction) {
1449  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1450  locations->SetInAt(0, Location::RequiresRegister());
1451  locations->SetInAt(1, Location::RequiresRegister());
1452  // TODO: Have a normalization phase that makes this instruction never used.
1453  locations->SetOut(Location::SameAsFirstInput());
1454  instruction->SetLocations(locations);
1455}
1456
1457void InstructionCodeGeneratorX86::VisitBoundsCheck(HBoundsCheck* instruction) {
1458  LocationSummary* locations = instruction->GetLocations();
1459  SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86(
1460      instruction->GetDexPc(), locations->InAt(0), locations->InAt(1));
1461  codegen_->AddSlowPath(slow_path);
1462
1463  Register index = locations->InAt(0).AsX86().AsCpuRegister();
1464  Register length = locations->InAt(1).AsX86().AsCpuRegister();
1465
1466  __ cmpl(index, length);
1467  __ j(kAboveEqual, slow_path->GetEntryLabel());
1468}
1469
void LocationsBuilderX86::VisitTemporary(HTemporary* temp) {
  // Temporaries carry no location summary of their own.
  temp->SetLocations(nullptr);
}
1473
void InstructionCodeGeneratorX86::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator: no code is emitted
  // for a temporary itself.
}
1477
void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction) {
  // Parallel moves never go through the locations builder; reaching this
  // visitor is a bug.
  LOG(FATAL) << "Unreachable";
}
1481
void InstructionCodeGeneratorX86::VisitParallelMove(HParallelMove* instruction) {
  // Delegate to the move resolver, which orders the moves (using swaps for
  // cycles — see EmitSwap) and emits the native code.
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
1485
X86Assembler* ParallelMoveResolverX86::GetAssembler() const {
  // The resolver emits code through the code generator's assembler.
  return codegen_->GetAssembler();
}
1489
1490void ParallelMoveResolverX86::MoveMemoryToMemory(int dst, int src) {
1491  ScratchRegisterScope ensure_scratch(
1492      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
1493  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
1494  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, src + stack_offset));
1495  __ movl(Address(ESP, dst + stack_offset), static_cast<Register>(ensure_scratch.GetRegister()));
1496}
1497
1498void ParallelMoveResolverX86::EmitMove(size_t index) {
1499  MoveOperands* move = moves_.Get(index);
1500  Location source = move->GetSource();
1501  Location destination = move->GetDestination();
1502
1503  if (source.IsRegister()) {
1504    if (destination.IsRegister()) {
1505      __ movl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
1506    } else {
1507      DCHECK(destination.IsStackSlot());
1508      __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsCpuRegister());
1509    }
1510  } else if (source.IsStackSlot()) {
1511    if (destination.IsRegister()) {
1512      __ movl(destination.AsX86().AsCpuRegister(), Address(ESP, source.GetStackIndex()));
1513    } else {
1514      DCHECK(destination.IsStackSlot());
1515      MoveMemoryToMemory(destination.GetStackIndex(),
1516                         source.GetStackIndex());
1517    }
1518  } else if (source.IsConstant()) {
1519    HIntConstant* instruction = source.GetConstant()->AsIntConstant();
1520    Immediate imm(instruction->AsIntConstant()->GetValue());
1521    if (destination.IsRegister()) {
1522      __ movl(destination.AsX86().AsCpuRegister(), imm);
1523    } else {
1524      __ movl(Address(ESP, destination.GetStackIndex()), imm);
1525    }
1526  } else {
1527    LOG(FATAL) << "Unimplemented";
1528  }
1529}
1530
1531void ParallelMoveResolverX86::Exchange(Register reg, int mem) {
1532  Register suggested_scratch = reg == EAX ? EBX : EAX;
1533  ScratchRegisterScope ensure_scratch(
1534      this, reg, suggested_scratch, codegen_->GetNumberOfCoreRegisters());
1535
1536  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
1537  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset));
1538  __ movl(Address(ESP, mem + stack_offset), reg);
1539  __ movl(reg, static_cast<Register>(ensure_scratch.GetRegister()));
1540}
1541
void ParallelMoveResolverX86::Exchange(int mem1, int mem2) {
  // Swap two stack slots through two scratch registers. Each scope may spill
  // (push) a register to free it; every spill shifts ESP-relative offsets by
  // one word, hence the accumulated stack_offset below.
  ScratchRegisterScope ensure_scratch1(
      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());

  // Ask for a second scratch distinct from the first one.
  Register suggested_scratch = ensure_scratch1.GetRegister() == EAX ? EBX : EAX;
  ScratchRegisterScope ensure_scratch2(
      this, ensure_scratch1.GetRegister(), suggested_scratch, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch1.IsSpilled() ? kX86WordSize : 0;
  stack_offset += ensure_scratch2.IsSpilled() ? kX86WordSize : 0;
  // Load both slots, then store them back crosswise.
  __ movl(static_cast<Register>(ensure_scratch1.GetRegister()), Address(ESP, mem1 + stack_offset));
  __ movl(static_cast<Register>(ensure_scratch2.GetRegister()), Address(ESP, mem2 + stack_offset));
  __ movl(Address(ESP, mem2 + stack_offset), static_cast<Register>(ensure_scratch1.GetRegister()));
  __ movl(Address(ESP, mem1 + stack_offset), static_cast<Register>(ensure_scratch2.GetRegister()));
}
1557
1558void ParallelMoveResolverX86::EmitSwap(size_t index) {
1559  MoveOperands* move = moves_.Get(index);
1560  Location source = move->GetSource();
1561  Location destination = move->GetDestination();
1562
1563  if (source.IsRegister() && destination.IsRegister()) {
1564    __ xchgl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
1565  } else if (source.IsRegister() && destination.IsStackSlot()) {
1566    Exchange(source.AsX86().AsCpuRegister(), destination.GetStackIndex());
1567  } else if (source.IsStackSlot() && destination.IsRegister()) {
1568    Exchange(destination.AsX86().AsCpuRegister(), source.GetStackIndex());
1569  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
1570    Exchange(destination.GetStackIndex(), source.GetStackIndex());
1571  } else {
1572    LOG(FATAL) << "Unimplemented";
1573  }
1574}
1575
void ParallelMoveResolverX86::SpillScratch(int reg) {
  // Free a register for scratch use by pushing its current value.
  __ pushl(static_cast<Register>(reg));
}
1579
void ParallelMoveResolverX86::RestoreScratch(int reg) {
  // Undo SpillScratch: pop the preserved value back into the register.
  __ popl(static_cast<Register>(reg));
}
1583
1584}  // namespace x86
1585}  // namespace art
1586