code_generator_x86.cc revision 51d3fc40637fc73d4156ad617cd451b844cbb75e
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86.h"
18
19#include "entrypoints/quick/quick_entrypoints.h"
20#include "gc/accounting/card_table.h"
21#include "mirror/array-inl.h"
22#include "mirror/art_method.h"
23#include "mirror/class.h"
24#include "thread.h"
25#include "utils/assembler.h"
26#include "utils/stack_checks.h"
27#include "utils/x86/assembler_x86.h"
28#include "utils/x86/managed_register_x86.h"
29
30namespace art {
31
32namespace x86 {
33
// When false (the default), stack overflow is detected implicitly by touching
// the far end of the reserved stack region on frame entry; when true, an
// explicit ESP-vs-stack-end comparison with a slow path is emitted instead.
static constexpr bool kExplicitStackOverflowCheck = false;

// Only one word (the fake return-address register) is counted as pushed at
// frame entry; see GenerateFrameEntry.
static constexpr int kNumberOfPushedRegistersAtEntry = 1;
// Stack offset (from ESP) of the current method within the frame.
static constexpr int kCurrentMethodStackOffset = 0;

// Core registers used to pass arguments to runtime entry points.
static constexpr Register kRuntimeParameterCoreRegisters[] = { EAX, ECX, EDX, EBX };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
// No XMM registers are used for runtime-call arguments on x86.
static constexpr XmmRegister kRuntimeParameterFpuRegisters[] = { };
static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;
44
// Calling convention used for calls into the runtime: arguments go in the
// core registers listed in kRuntimeParameterCoreRegisters; no FPU registers
// are used (see the empty kRuntimeParameterFpuRegisters above).
class InvokeRuntimeCallingConvention : public CallingConvention<Register, XmmRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
56
57#define __ reinterpret_cast<X86Assembler*>(codegen->GetAssembler())->
58
// Base class for all x86 slow paths. Provides the entry label (the target
// the fast path branches to) and the exit label (the point a slow path jumps
// back to when it resumes normal execution).
class SlowPathCodeX86 : public SlowPathCode {
 public:
  SlowPathCodeX86() : entry_label_(), exit_label_() {}

  Label* GetEntryLabel() { return &entry_label_; }
  Label* GetExitLabel() { return &exit_label_; }

 private:
  Label entry_label_;
  Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeX86);
};
72
// Slow path for HNullCheck: calls the pThrowNullPointer runtime entry point
// and records the dex PC so the stack walker can map the call site. No jump
// back to the exit label is emitted (the entry point throws).
class NullCheckSlowPathX86 : public SlowPathCodeX86 {
 public:
  explicit NullCheckSlowPathX86(HNullCheck* instruction) : instruction_(instruction) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowNullPointer)));
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86);
};
87
// Slow path for HDivZeroCheck: calls the pThrowDivZero runtime entry point
// and records the dex PC. Mirrors NullCheckSlowPathX86 (throwing entry point,
// so no jump back to the exit label).
class DivZeroCheckSlowPathX86 : public SlowPathCodeX86 {
 public:
  explicit DivZeroCheckSlowPathX86(HDivZeroCheck* instruction) : instruction_(instruction) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowDivZero)));
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86);
};
102
// Slow path for division by -1: the quotient is simply the negated dividend,
// computed here with negl (presumably to sidestep the idiv overflow for
// kMinInt / -1 — TODO(review): confirm against the div lowering), then
// control jumps back to the fast path.
class DivMinusOneSlowPathX86 : public SlowPathCodeX86 {
 public:
  explicit DivMinusOneSlowPathX86(Register reg) : reg_(reg) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    __ negl(reg_);
    __ jmp(GetExitLabel());
  }

 private:
  // Register holding the dividend on entry and the result on exit.
  Register reg_;
  DISALLOW_COPY_AND_ASSIGN(DivMinusOneSlowPathX86);
};
117
// Slow path for the explicit stack overflow check: restores ESP to undo the
// frame allocation done in GenerateFrameEntry, then tail-jumps to the
// pThrowStackOverflow runtime entry point.
class StackOverflowCheckSlowPathX86 : public SlowPathCodeX86 {
 public:
  StackOverflowCheckSlowPathX86() {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    // Same adjustment as GenerateFrameExit: pop everything but the pushed
    // entry registers.
    __ addl(ESP,
            Immediate(codegen->GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
    __ fs()->jmp(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowStackOverflow)));
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathX86);
};
132
// Slow path for HBoundsCheck: moves the offending index and the array length
// into the first two runtime-call argument registers and calls the
// pThrowArrayBounds entry point.
class BoundsCheckSlowPathX86 : public SlowPathCodeX86 {
 public:
  BoundsCheckSlowPathX86(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction), index_location_(index_location), length_location_(length_location) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    x86_codegen->EmitParallelMoves(
        index_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        length_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowArrayBounds)));
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  // Location of the out-of-range index.
  const Location index_location_;
  // Location of the array length it was compared against.
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86);
};
162
// Slow path for HSuspendCheck: saves live registers, calls the pTestSuspend
// runtime entry point, restores the registers, and resumes either at the
// return label (when no successor block was supplied) or by jumping to the
// given successor block.
class SuspendCheckSlowPathX86 : public SlowPathCodeX86 {
 public:
  explicit SuspendCheckSlowPathX86(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    // The runtime call may clobber registers; preserve everything live.
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pTestSuspend)));
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_codegen->GetLabelOf(successor_));
    }
  }

  // Label to resume at; only meaningful when no successor block was given.
  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // Block to jump to after the suspend check, or nullptr to fall back to
  // return_label_.
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86);
};
194
// Slow path for HLoadString: calls the pResolveString runtime entry point
// with the current method and the string index, then moves the result
// (returned in EAX) into the instruction's output location.
class LoadStringSlowPathX86 : public SlowPathCodeX86 {
 public:
  explicit LoadStringSlowPathX86(HLoadString* instruction) : instruction_(instruction) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // The output register must not be part of the saved live set, or the
    // restore below would clobber the result.
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Arguments: (current method, string index).
    InvokeRuntimeCallingConvention calling_convention;
    x86_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(0));
    __ movl(calling_convention.GetRegisterAt(1), Immediate(instruction_->GetStringIndex()));
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pResolveString)));
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
    x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
    codegen->RestoreLiveRegisters(locations);

    __ jmp(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86);
};
223
// Slow path for HLoadClass / HClinitCheck: calls pInitializeType, or
// pInitializeStaticStorage when the class must also be initialized, and
// moves the result (returned in EAX) into the output location if one exists.
class LoadClassSlowPathX86 : public SlowPathCodeX86 {
 public:
  LoadClassSlowPathX86(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Arguments: (type index, current method).
    InvokeRuntimeCallingConvention calling_convention;
    __ movl(calling_convention.GetRegisterAt(0), Immediate(cls_->GetTypeIndex()));
    x86_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    __ fs()->call(Address::Absolute(do_clinit_
        ? QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pInitializeStaticStorage)
        : QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pInitializeType)));
    codegen->RecordPcInfo(at_, dex_pc_);

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      // The output must not be in the saved live set or the restore below
      // would clobber it.
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_codegen->Move32(out, Location::RegisterLocation(EAX));
    }

    codegen->RestoreLiveRegisters(locations);
    __ jmp(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86);
};
275
// Slow path shared by HInstanceOf and HCheckCast: passes the class to check
// against and the object's class to pInstanceofNonTrivial or pCheckCast.
// For instance-of, the result (returned in EAX) is moved to the output
// location; check-cast has no output (the entry point throws on failure).
class TypeCheckSlowPathX86 : public SlowPathCodeX86 {
 public:
  TypeCheckSlowPathX86(HInstruction* instruction,
                       Location class_to_check,
                       Location object_class,
                       uint32_t dex_pc)
      : instruction_(instruction),
        class_to_check_(class_to_check),
        object_class_(object_class),
        dex_pc_(dex_pc) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // For instance-of, the output register must not be in the saved live set,
    // or the restore below would clobber the result.
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    x86_codegen->EmitParallelMoves(
        class_to_check_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        object_class_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));

    if (instruction_->IsInstanceOf()) {
      __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pInstanceofNonTrivial)));
    } else {
      DCHECK(instruction_->IsCheckCast());
      __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pCheckCast)));
    }

    codegen->RecordPcInfo(instruction_, dex_pc_);
    if (instruction_->IsInstanceOf()) {
      x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
    }
    codegen->RestoreLiveRegisters(locations);

    __ jmp(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  const Location class_to_check_;
  const Location object_class_;
  const uint32_t dex_pc_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86);
};
329
330#undef __
331#define __ reinterpret_cast<X86Assembler*>(GetAssembler())->
332
333inline Condition X86Condition(IfCondition cond) {
334  switch (cond) {
335    case kCondEQ: return kEqual;
336    case kCondNE: return kNotEqual;
337    case kCondLT: return kLess;
338    case kCondLE: return kLessEqual;
339    case kCondGT: return kGreater;
340    case kCondGE: return kGreaterEqual;
341    default:
342      LOG(FATAL) << "Unknown if condition";
343  }
344  return kEqual;
345}
346
347void CodeGeneratorX86::DumpCoreRegister(std::ostream& stream, int reg) const {
348  stream << X86ManagedRegister::FromCpuRegister(Register(reg));
349}
350
351void CodeGeneratorX86::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
352  stream << X86ManagedRegister::FromXmmRegister(XmmRegister(reg));
353}
354
// Spills core register `reg_id` to the stack slot at `stack_index` and
// returns the number of bytes used.
size_t CodeGeneratorX86::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movl(Address(ESP, stack_index), static_cast<Register>(reg_id));
  return kX86WordSize;
}
359
// Reloads core register `reg_id` from the stack slot at `stack_index` and
// returns the number of bytes read.
size_t CodeGeneratorX86::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ movl(static_cast<Register>(reg_id), Address(ESP, stack_index));
  return kX86WordSize;
}
364
// Constructs the x86 code generator and its helpers (location builder,
// instruction visitor, parallel move resolver) over `graph`'s arena.
CodeGeneratorX86::CodeGeneratorX86(HGraph* graph)
    : CodeGenerator(graph, kNumberOfCpuRegisters, kNumberOfXmmRegisters, kNumberOfRegisterPairs),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this) {}
371
// Number of bytes occupied by registers pushed at frame entry.
size_t CodeGeneratorX86::FrameEntrySpillSize() const {
  return kNumberOfPushedRegistersAtEntry * kX86WordSize;
}
375
// Allocates a currently-free register suitable for `type` and marks it (and
// any overlapping pair) as blocked: longs get a core register pair,
// floats/doubles an XMM register, and all other non-void types a single core
// register.
Location CodeGeneratorX86::AllocateFreeRegister(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimLong: {
      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
      X86ManagedRegister pair =
          X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);
      // Block both halves individually, then refresh the pair table so any
      // pair sharing a half with this one is blocked too.
      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
      UpdateBlockedPairRegisters();
      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register reg = static_cast<Register>(
          FindFreeEntry(blocked_core_registers_, kNumberOfCpuRegisters));
      // Block all register pairs that contain `reg`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        X86ManagedRegister current =
            X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs_[i] = true;
        }
      }
      return Location::RegisterLocation(reg);
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      return Location::FpuRegisterLocation(
          FindFreeEntry(blocked_fpu_registers_, kNumberOfXmmRegisters));
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return Location();
}
421
// Marks the registers the allocator must never hand out, then derives the
// blocked register pairs from the blocked singles.
void CodeGeneratorX86::SetupBlockedRegisters() const {
  // Don't allocate the dalvik style register pair passing.
  blocked_register_pairs_[ECX_EDX] = true;

  // Stack register is always reserved.
  blocked_core_registers_[ESP] = true;

  // TODO: We currently don't use Quick's callee saved registers.
  blocked_core_registers_[EBP] = true;
  blocked_core_registers_[ESI] = true;
  blocked_core_registers_[EDI] = true;

  UpdateBlockedPairRegisters();
}
436
437void CodeGeneratorX86::UpdateBlockedPairRegisters() const {
438  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
439    X86ManagedRegister current =
440        X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
441    if (blocked_core_registers_[current.AsRegisterPairLow()]
442        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
443      blocked_register_pairs_[i] = true;
444    }
445  }
446}
447
// Visitor that emits native code for each HInstruction, sharing the
// assembler owned by `codegen`.
InstructionCodeGeneratorX86::InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
452
// Emits the method prologue: optional stack overflow check (implicit probe
// or explicit comparison, depending on kExplicitStackOverflowCheck), frame
// allocation, and the store of the current method into its frame slot.
void CodeGeneratorX86::GenerateFrameEntry() {
  // Create a fake register to mimic Quick.
  static const int kFakeReturnRegister = 8;
  core_spill_mask_ |= (1 << kFakeReturnRegister);

  // Leaf methods with small frames cannot overflow; skip the check for them.
  bool skip_overflow_check = IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86);
  if (!skip_overflow_check && !kExplicitStackOverflowCheck) {
    // Implicit check: touch the far end of the reserved stack region so that
    // an overflow faults here, before the frame is set up.
    __ testl(EAX, Address(ESP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86))));
    RecordPcInfo(nullptr, 0);
  }

  // The return PC has already been pushed on the stack.
  __ subl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));

  if (!skip_overflow_check && kExplicitStackOverflowCheck) {
    // Explicit check: compare ESP with the thread's stack end and branch to
    // the slow path when the frame dips below it.
    SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathX86();
    AddSlowPath(slow_path);

    __ fs()->cmpl(ESP, Address::Absolute(Thread::StackEndOffset<kX86WordSize>()));
    __ j(kLess, slow_path->GetEntryLabel());
  }

  // Spill the current method (in EAX on entry — see LoadCurrentMethod, which
  // reads it back from this slot).
  __ movl(Address(ESP, kCurrentMethodStackOffset), EAX);
}
477
// Emits the method epilogue: releases the frame allocated in
// GenerateFrameEntry (the pushed return PC is popped by the ret).
void CodeGeneratorX86::GenerateFrameExit() {
  __ addl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
}
481
// Binds `block`'s label to the current assembler position.
void CodeGeneratorX86::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
485
// Loads the current method from its frame slot (stored by GenerateFrameEntry)
// into `reg`.
void CodeGeneratorX86::LoadCurrentMethod(Register reg) {
  __ movl(reg, Address(ESP, kCurrentMethodStackOffset));
}
489
490Location CodeGeneratorX86::GetStackLocation(HLoadLocal* load) const {
491  switch (load->GetType()) {
492    case Primitive::kPrimLong:
493    case Primitive::kPrimDouble:
494      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
495      break;
496
497    case Primitive::kPrimInt:
498    case Primitive::kPrimNot:
499    case Primitive::kPrimFloat:
500      return Location::StackSlot(GetStackSlot(load->GetLocal()));
501
502    case Primitive::kPrimBoolean:
503    case Primitive::kPrimByte:
504    case Primitive::kPrimChar:
505    case Primitive::kPrimShort:
506    case Primitive::kPrimVoid:
507      LOG(FATAL) << "Unexpected type " << load->GetType();
508  }
509
510  LOG(FATAL) << "Unreachable";
511  return Location();
512}
513
// Computes the location of the next method argument of type `type` under the
// x86 invoke-dex calling convention. 32-bit values take one GP register
// while registers last, then a stack slot. 64-bit values take two GP
// registers, or — when only one register remains — are split between that
// register and the stack (a "quick parameter"), or go fully on the stack.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble: {
      // 64-bit values consume two consecutive argument positions.
      uint32_t index = gp_index_;
      gp_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        X86ManagedRegister pair = X86ManagedRegister::FromRegisterPair(
            calling_convention.GetRegisterPairAt(index));
        return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
        // On X86, the register index and stack index of a quick parameter is the same, since
        // we are passing floating pointer values in core registers.
        return Location::QuickParameter(index, index);
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
554
// Moves a 32-bit value between any pair of locations (core register, XMM
// register, stack slot). No-op when source and destination are equal.
void CodeGeneratorX86::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movl(destination.As<Register>(), source.As<Register>());
    } else if (source.IsFpuRegister()) {
      __ movd(destination.As<Register>(), source.As<XmmRegister>());
    } else {
      DCHECK(source.IsStackSlot());
      __ movl(destination.As<Register>(), Address(ESP, source.GetStackIndex()));
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ movd(destination.As<XmmRegister>(), source.As<Register>());
    } else if (source.IsFpuRegister()) {
      __ movaps(destination.As<XmmRegister>(), source.As<XmmRegister>());
    } else {
      DCHECK(source.IsStackSlot());
      __ movss(destination.As<XmmRegister>(), Address(ESP, source.GetStackIndex()));
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      __ movl(Address(ESP, destination.GetStackIndex()), source.As<Register>());
    } else if (source.IsFpuRegister()) {
      __ movss(Address(ESP, destination.GetStackIndex()), source.As<XmmRegister>());
    } else {
      DCHECK(source.IsStackSlot());
      // Stack-to-stack copy without clobbering a register: push the source
      // word, then pop it into the destination slot.
      __ pushl(Address(ESP, source.GetStackIndex()));
      __ popl(Address(ESP, destination.GetStackIndex()));
    }
  }
}
590
591void CodeGeneratorX86::Move64(Location destination, Location source) {
592  if (source.Equals(destination)) {
593    return;
594  }
595  if (destination.IsRegisterPair()) {
596    if (source.IsRegisterPair()) {
597      __ movl(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
598      __ movl(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
599    } else if (source.IsFpuRegister()) {
600      LOG(FATAL) << "Unimplemented";
601    } else if (source.IsQuickParameter()) {
602      uint16_t register_index = source.GetQuickParameterRegisterIndex();
603      uint16_t stack_index = source.GetQuickParameterStackIndex();
604      InvokeDexCallingConvention calling_convention;
605      __ movl(destination.AsRegisterPairLow<Register>(),
606              calling_convention.GetRegisterAt(register_index));
607      __ movl(destination.AsRegisterPairHigh<Register>(), Address(ESP,
608          calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize()));
609    } else {
610      DCHECK(source.IsDoubleStackSlot());
611      __ movl(destination.AsRegisterPairLow<Register>(), Address(ESP, source.GetStackIndex()));
612      __ movl(destination.AsRegisterPairHigh<Register>(),
613              Address(ESP, source.GetHighStackIndex(kX86WordSize)));
614    }
615  } else if (destination.IsQuickParameter()) {
616    InvokeDexCallingConvention calling_convention;
617    uint16_t register_index = destination.GetQuickParameterRegisterIndex();
618    uint16_t stack_index = destination.GetQuickParameterStackIndex();
619    if (source.IsRegister()) {
620      __ movl(calling_convention.GetRegisterAt(register_index), source.AsRegisterPairLow<Register>());
621      __ movl(Address(ESP, calling_convention.GetStackOffsetOf(stack_index + 1)),
622              source.AsRegisterPairHigh<Register>());
623    } else if (source.IsFpuRegister()) {
624      LOG(FATAL) << "Unimplemented";
625    } else {
626      DCHECK(source.IsDoubleStackSlot());
627      __ movl(calling_convention.GetRegisterAt(register_index),
628              Address(ESP, source.GetStackIndex()));
629      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
630      __ popl(Address(ESP, calling_convention.GetStackOffsetOf(stack_index + 1)));
631    }
632  } else if (destination.IsFpuRegister()) {
633    if (source.IsDoubleStackSlot()) {
634      __ movsd(destination.As<XmmRegister>(), Address(ESP, source.GetStackIndex()));
635    } else {
636      LOG(FATAL) << "Unimplemented";
637    }
638  } else {
639    DCHECK(destination.IsDoubleStackSlot()) << destination;
640    if (source.IsRegisterPair()) {
641      __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegisterPairLow<Register>());
642      __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
643              source.AsRegisterPairHigh<Register>());
644    } else if (source.IsQuickParameter()) {
645      InvokeDexCallingConvention calling_convention;
646      uint16_t register_index = source.GetQuickParameterRegisterIndex();
647      uint16_t stack_index = source.GetQuickParameterStackIndex();
648      __ movl(Address(ESP, destination.GetStackIndex()),
649              calling_convention.GetRegisterAt(register_index));
650      DCHECK_EQ(calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize(),
651                static_cast<size_t>(destination.GetHighStackIndex(kX86WordSize)));
652    } else if (source.IsFpuRegister()) {
653      __ movsd(Address(ESP, destination.GetStackIndex()), source.As<XmmRegister>());
654    } else {
655      DCHECK(source.IsDoubleStackSlot());
656      __ pushl(Address(ESP, source.GetStackIndex()));
657      __ popl(Address(ESP, destination.GetStackIndex()));
658      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
659      __ popl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)));
660    }
661  }
662}
663
// Moves the value produced (or represented) by `instruction` into `location`
// on behalf of `move_for`: constants are materialized directly, temporaries
// and locals are copied from their stack slots, and everything else is moved
// from the instruction's output location.
void CodeGeneratorX86::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  if (instruction->IsIntConstant()) {
    Immediate imm(instruction->AsIntConstant()->GetValue());
    if (location.IsRegister()) {
      __ movl(location.As<Register>(), imm);
    } else if (location.IsStackSlot()) {
      __ movl(Address(ESP, location.GetStackIndex()), imm);
    } else {
      // A constant location needs no code; the value is the location itself.
      DCHECK(location.IsConstant());
      DCHECK_EQ(location.GetConstant(), instruction);
    }
  } else if (instruction->IsLongConstant()) {
    int64_t value = instruction->AsLongConstant()->GetValue();
    if (location.IsRegisterPair()) {
      __ movl(location.AsRegisterPairLow<Register>(), Immediate(Low32Bits(value)));
      __ movl(location.AsRegisterPairHigh<Register>(), Immediate(High32Bits(value)));
    } else if (location.IsDoubleStackSlot()) {
      __ movl(Address(ESP, location.GetStackIndex()), Immediate(Low32Bits(value)));
      __ movl(Address(ESP, location.GetHighStackIndex(kX86WordSize)), Immediate(High32Bits(value)));
    } else {
      DCHECK(location.IsConstant());
      DCHECK_EQ(location.GetConstant(), instruction);
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    if (temp_location.IsStackSlot()) {
      Move32(location, temp_location);
    } else {
      DCHECK(temp_location.IsDoubleStackSlot());
      Move64(location, temp_location);
    }
  } else if (instruction->IsLoadLocal()) {
    // Copy the local out of its dex-register stack slot, 32 or 64 bits wide
    // depending on the type.
    int slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(slot));
        break;

      default:
        LOG(FATAL) << "Unimplemented local type " << instruction->GetType();
    }
  } else {
    // General case: the value must come from the instruction's own output.
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, instruction->GetLocations()->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, instruction->GetLocations()->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}
739
// A goto has no operands and therefore needs no location summary.
void LocationsBuilderX86::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
743
// Emits the branch for a goto, folding in any suspend check attached to a
// loop back edge or to the entry block, and eliding the jump entirely when
// the successor is the next block in emission order.
void InstructionCodeGeneratorX86::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  // Loop back edges carry the loop's suspend check; emit it here and let it
  // perform the jump to the successor.
  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  // A suspend check immediately preceding the entry block's goto is emitted
  // together with it.
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
765
// The exit instruction has no operands and needs no location summary.
void LocationsBuilderX86::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
769
// The exit block should never actually be executed; in debug builds emit an
// int3 trap so reaching it is caught immediately.
void InstructionCodeGeneratorX86::VisitExit(HExit* exit) {
  UNUSED(exit);
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ int3();
  }
}
777
// An if needs an input location only when its condition is materialized
// (i.e. produced as a value rather than consumed directly via eflags).
void LocationsBuilderX86::VisitIf(HIf* if_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
  HInstruction* cond = if_instr->InputAt(0);
  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::Any());
  }
}
786
787void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
788  HInstruction* cond = if_instr->InputAt(0);
789  if (cond->IsIntConstant()) {
790    // Constant condition, statically compared against 1.
791    int32_t cond_value = cond->AsIntConstant()->GetValue();
792    if (cond_value == 1) {
793      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
794                                     if_instr->IfTrueSuccessor())) {
795        __ jmp(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
796      }
797      return;
798    } else {
799      DCHECK_EQ(cond_value, 0);
800    }
801  } else {
802    bool materialized =
803        !cond->IsCondition() || cond->AsCondition()->NeedsMaterialization();
804    // Moves do not affect the eflags register, so if the condition is
805    // evaluated just before the if, we don't need to evaluate it
806    // again.
807    bool eflags_set = cond->IsCondition()
808        && cond->AsCondition()->IsBeforeWhenDisregardMoves(if_instr);
809    if (materialized) {
810      if (!eflags_set) {
811        // Materialized condition, compare against 0.
812        Location lhs = if_instr->GetLocations()->InAt(0);
813        if (lhs.IsRegister()) {
814          __ cmpl(lhs.As<Register>(), Immediate(0));
815        } else {
816          __ cmpl(Address(ESP, lhs.GetStackIndex()), Immediate(0));
817        }
818        __ j(kNotEqual,  codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
819      } else {
820        __ j(X86Condition(cond->AsCondition()->GetCondition()),
821             codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
822      }
823    } else {
824      Location lhs = cond->GetLocations()->InAt(0);
825      Location rhs = cond->GetLocations()->InAt(1);
826      // LHS is guaranteed to be in a register (see
827      // LocationsBuilderX86::VisitCondition).
828      if (rhs.IsRegister()) {
829        __ cmpl(lhs.As<Register>(), rhs.As<Register>());
830      } else if (rhs.IsConstant()) {
831        HIntConstant* instruction = rhs.GetConstant()->AsIntConstant();
832        Immediate imm(instruction->AsIntConstant()->GetValue());
833        __ cmpl(lhs.As<Register>(), imm);
834      } else {
835        __ cmpl(lhs.As<Register>(), Address(ESP, rhs.GetStackIndex()));
836      }
837      __ j(X86Condition(cond->AsCondition()->GetCondition()),
838           codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
839    }
840  }
841  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
842                                 if_instr->IfFalseSuccessor())) {
843    __ jmp(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
844  }
845}
846
// HLocal is a bookkeeping instruction only: no locations needed.
void LocationsBuilderX86::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}
850
void InstructionCodeGeneratorX86::VisitLocal(HLocal* local) {
  // Locals are declared in the entry block only; nothing to emit.
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}
854
// Loads of locals are resolved at the use site: no locations needed.
void LocationsBuilderX86::VisitLoadLocal(HLoadLocal* local) {
  local->SetLocations(nullptr);
}
858
void InstructionCodeGeneratorX86::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}
863
864void LocationsBuilderX86::VisitStoreLocal(HStoreLocal* store) {
865  LocationSummary* locations =
866      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
867  switch (store->InputAt(1)->GetType()) {
868    case Primitive::kPrimBoolean:
869    case Primitive::kPrimByte:
870    case Primitive::kPrimChar:
871    case Primitive::kPrimShort:
872    case Primitive::kPrimInt:
873    case Primitive::kPrimNot:
874    case Primitive::kPrimFloat:
875      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
876      break;
877
878    case Primitive::kPrimLong:
879    case Primitive::kPrimDouble:
880      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
881      break;
882
883    default:
884      LOG(FATAL) << "Unknown local type " << store->InputAt(1)->GetType();
885  }
886  store->SetLocations(locations);
887}
888
void InstructionCodeGeneratorX86::VisitStoreLocal(HStoreLocal* store) {
  // Nothing to emit: the input was constrained to the local's stack slot in
  // the locations builder, so any needed move is materialized elsewhere.
  UNUSED(store);
}
892
void LocationsBuilderX86::VisitCondition(HCondition* comp) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
  // cmpl needs its first operand in a register; the second operand may be a
  // register, a constant, or a stack slot.
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::Any());
  // Only materialized conditions produce a boolean in a register; otherwise
  // the condition is consumed through eflags by the enclosing HIf.
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister());
  }
}
902
void InstructionCodeGeneratorX86::VisitCondition(HCondition* comp) {
  // Non-materialized conditions are emitted by VisitIf as compare + jump.
  if (comp->NeedsMaterialization()) {
    LocationSummary* locations = comp->GetLocations();
    Register reg = locations->Out().As<Register>();
    // Clear register: setcc only sets the low byte.  The xor must come
    // before the compare, because xorl clobbers eflags.
    __ xorl(reg, reg);
    if (locations->InAt(1).IsRegister()) {
      __ cmpl(locations->InAt(0).As<Register>(),
              locations->InAt(1).As<Register>());
    } else if (locations->InAt(1).IsConstant()) {
      HConstant* instruction = locations->InAt(1).GetConstant();
      Immediate imm(instruction->AsIntConstant()->GetValue());
      __ cmpl(locations->InAt(0).As<Register>(), imm);
    } else {
      __ cmpl(locations->InAt(0).As<Register>(),
              Address(ESP, locations->InAt(1).GetStackIndex()));
    }
    // NOTE(review): setcc needs a byte-addressable register (EAX..EDX);
    // presumably the register allocator guarantees that for this output --
    // TODO confirm.
    __ setb(X86Condition(comp->GetCondition()), reg);
  }
}
923
// All comparison variants share the generic HCondition locations logic.
void LocationsBuilderX86::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
927
// All comparison variants share the generic HCondition code generation.
void InstructionCodeGeneratorX86::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
931
// Delegates to the shared HCondition locations logic.
void LocationsBuilderX86::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
935
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorX86::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
939
// Delegates to the shared HCondition locations logic.
void LocationsBuilderX86::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
943
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorX86::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
947
// Delegates to the shared HCondition locations logic.
void LocationsBuilderX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
951
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
955
// Delegates to the shared HCondition locations logic.
void LocationsBuilderX86::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
959
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorX86::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
963
// Delegates to the shared HCondition locations logic.
void LocationsBuilderX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
967
// Delegates to the shared HCondition code generation.
void InstructionCodeGeneratorX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
971
void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  // Constants take no register: users materialize the value at the use site.
  locations->SetOut(Location::ConstantLocation(constant));
}
977
void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site; nothing to emit here.
  UNUSED(constant);
}
982
void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  // Constants take no register: users materialize the value at the use site.
  locations->SetOut(Location::ConstantLocation(constant));
}
988
void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site; nothing to emit here.
  UNUSED(constant);
}
993
void LocationsBuilderX86::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  // Constants take no register: users materialize the value at the use site.
  locations->SetOut(Location::ConstantLocation(constant));
}
999
void InstructionCodeGeneratorX86::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site; nothing to emit here.
  UNUSED(constant);
}
1004
void LocationsBuilderX86::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  // Constants take no register: users materialize the value at the use site.
  locations->SetOut(Location::ConstantLocation(constant));
}
1010
void InstructionCodeGeneratorX86::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site; nothing to emit here.
  UNUSED(constant);
}
1015
// A void return has no operand: no locations needed.
void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
1019
void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret) {
  UNUSED(ret);
  // Tear down the frame, then return to the caller.
  codegen_->GenerateFrameExit();
  __ ret();
}
1025
// Pins the return value into its ABI location: EAX for 32-bit core types,
// the EAX:EDX pair for longs, and XMM0 for floating point.
void LocationsBuilderX86::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(0, Location::RegisterLocation(EAX));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(
          0, Location::RegisterPairLocation(EAX, EDX));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(
          0, Location::FpuRegisterLocation(XMM0));
      break;

    default:
      LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
  }
}
1054
void InstructionCodeGeneratorX86::VisitReturn(HReturn* ret) {
  // In debug builds, verify the register allocator honored the ABI locations
  // requested in LocationsBuilderX86::VisitReturn.  The value itself is
  // already in place, so only the frame exit and ret are emitted.
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        DCHECK_EQ(ret->GetLocations()->InAt(0).As<Register>(), EAX);
        break;

      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairLow<Register>(), EAX);
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairHigh<Register>(), EDX);
        break;

      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        DCHECK_EQ(ret->GetLocations()->InAt(0).As<XmmRegister>(), XMM0);
        break;

      default:
        LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
  __ ret();
}
1084
// Static calls use the common invoke location setup.
void LocationsBuilderX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  HandleInvoke(invoke);
}
1088
// Emits a static call: resolves the callee through the current method's
// dex-cache resolved-methods array and calls its quick-compiled entry point.
void InstructionCodeGeneratorX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  codegen_->LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ movl(temp, Address(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()));
  // temp = temp[index_in_cache]
  __ movl(temp, Address(temp, CodeGenerator::GetCacheOffset(invoke->GetIndexInDexCache())));
  // (temp + offset_of_quick_compiled_code)()
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value()));

  DCHECK(!codegen_->IsLeafMethod());
  // Record the safepoint so the runtime can map the return PC to this dex pc.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1111
// Virtual calls use the common invoke location setup.
void LocationsBuilderX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  HandleInvoke(invoke);
}
1115
1116void LocationsBuilderX86::HandleInvoke(HInvoke* invoke) {
1117  LocationSummary* locations =
1118      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1119  locations->AddTemp(Location::RegisterLocation(EAX));
1120
1121  InvokeDexCallingConventionVisitor calling_convention_visitor;
1122  for (size_t i = 0; i < invoke->InputCount(); i++) {
1123    HInstruction* input = invoke->InputAt(i);
1124    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1125  }
1126
1127  switch (invoke->GetType()) {
1128    case Primitive::kPrimBoolean:
1129    case Primitive::kPrimByte:
1130    case Primitive::kPrimChar:
1131    case Primitive::kPrimShort:
1132    case Primitive::kPrimInt:
1133    case Primitive::kPrimNot:
1134      locations->SetOut(Location::RegisterLocation(EAX));
1135      break;
1136
1137    case Primitive::kPrimLong:
1138      locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
1139      break;
1140
1141    case Primitive::kPrimVoid:
1142      break;
1143
1144    case Primitive::kPrimDouble:
1145    case Primitive::kPrimFloat:
1146      locations->SetOut(Location::FpuRegisterLocation(XMM0));
1147      break;
1148  }
1149
1150  invoke->SetLocations(locations);
1151}
1152
// Emits a virtual call: loads the receiver's class, indexes its embedded
// vtable, and calls the resolved method's quick-compiled entry point.
void InstructionCodeGeneratorX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(ESP, receiver.GetStackIndex()));
    __ movl(temp, Address(temp, class_offset));
  } else {
    __ movl(temp, Address(receiver.As<Register>(), class_offset));
  }
  // temp = temp->GetMethodAt(method_offset);
  __ movl(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value()));

  DCHECK(!codegen_->IsLeafMethod());
  // Record the safepoint so the runtime can map the return PC to this dex pc.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1175
void LocationsBuilderX86::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument: the interface method's dex method index,
  // passed in XMM0 (see the movd in the code generator).
  invoke->GetLocations()->AddTemp(Location::FpuRegisterLocation(XMM0));
}
1181
// Emits an interface call: passes the dex method index as a hidden argument
// in XMM0, then dispatches through the receiver class's embedded IMT.
void InstructionCodeGeneratorX86::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
  // IMT slots are shared: the index is taken modulo the fixed table size.
  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Set the hidden argument.
  __ movl(temp, Immediate(invoke->GetDexMethodIndex()));
  __ movd(invoke->GetLocations()->GetTemp(1).As<XmmRegister>(), temp);

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(ESP, receiver.GetStackIndex()));
    __ movl(temp, Address(temp, class_offset));
  } else {
    __ movl(temp, Address(receiver.As<Register>(), class_offset));
  }
  // temp = temp->GetImtEntryAt(method_offset);
  __ movl(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value()));

  DCHECK(!codegen_->IsLeafMethod());
  // Record the safepoint so the runtime can map the return PC to this dex pc.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1210
void LocationsBuilderX86::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong:
      // Integer negation is done in place (negl on the input register(s)).
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      // Output overlaps as we need a fresh (zero-initialized)
      // register to perform subtraction from zero.
      locations->SetOut(Location::RequiresFpuRegister());
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1233
void InstructionCodeGeneratorX86::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negl(out.As<Register>());
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegisterPair());
      DCHECK(in.Equals(out));
      __ negl(out.AsRegisterPairLow<Register>());
      // 64-bit negation is subtraction from zero.  The first NEGL sets the
      // carry flag when the low 32 bits are non-zero (a borrow out of the
      // low word); ADCL folds that borrow into the high word before it is
      // itself negated.
      __ adcl(out.AsRegisterPairHigh<Register>(), Immediate(0));
      __ negl(out.AsRegisterPairHigh<Register>());
      break;

    case Primitive::kPrimFloat:
      DCHECK(!in.Equals(out));
      // out = 0
      __ xorps(out.As<XmmRegister>(), out.As<XmmRegister>());
      // out = out - in
      __ subss(out.As<XmmRegister>(), in.As<XmmRegister>());
      break;

    case Primitive::kPrimDouble:
      DCHECK(!in.Equals(out));
      // out = 0
      __ xorpd(out.As<XmmRegister>(), out.As<XmmRegister>());
      // out = out - in
      __ subsd(out.As<XmmRegister>(), in.As<XmmRegister>());
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1278
void LocationsBuilderX86::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // int-to-byte conversion.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // long-to-int conversion.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat:
        case Primitive::kPrimDouble:
          LOG(FATAL) << "Type conversion from " << input_type
                     << " to " << result_type << " not yet implemented";
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // int-to-long conversion: the code generator uses cdq, which
          // sign-extends EAX into EDX, hence the fixed registers here.
          locations->SetInAt(0, Location::RegisterLocation(EAX));
          locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
          break;

        case Primitive::kPrimFloat:
        case Primitive::kPrimDouble:
          LOG(FATAL) << "Type conversion from " << input_type << " to "
                     << result_type << " not yet implemented";
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Type conversion from " << input_type
                 << " to " << result_type << " not yet implemented";
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1355
void InstructionCodeGeneratorX86::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // int-to-byte conversion: sign-extend the low byte.
          if (in.IsRegister()) {
            // NOTE(review): As<ByteRegister>() requires a byte-addressable
            // register (EAX..EDX), but the builder allows Location::Any() --
            // confirm the register allocator guarantees this.
            __ movsxb(out.As<Register>(), in.As<ByteRegister>());
          } else if (in.IsStackSlot()) {
            __ movsxb(out.As<Register>(), Address(ESP, in.GetStackIndex()));
          } else {
            // Constant input: fold the truncation at compile time.
            DCHECK(in.GetConstant()->IsIntConstant());
            int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
            __ movl(out.As<Register>(), Immediate(static_cast<int8_t>(value)));
          }
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // long-to-int conversion: keep only the low 32 bits.
          if (in.IsRegisterPair()) {
            __ movl(out.As<Register>(), in.AsRegisterPairLow<Register>());
          } else if (in.IsDoubleStackSlot()) {
            // GetStackIndex() addresses the low word of the pair.
            __ movl(out.As<Register>(), Address(ESP, in.GetStackIndex()));
          } else {
            DCHECK(in.IsConstant());
            DCHECK(in.GetConstant()->IsLongConstant());
            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
            __ movl(out.As<Register>(), Immediate(static_cast<int32_t>(value)));
          }
          break;

        case Primitive::kPrimFloat:
        case Primitive::kPrimDouble:
          LOG(FATAL) << "Type conversion from " << input_type
                     << " to " << result_type << " not yet implemented";
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // int-to-long conversion: cdq sign-extends EAX into EDX, matching
          // the fixed EAX / EAX:EDX locations set by the builder.
          DCHECK_EQ(out.AsRegisterPairLow<Register>(), EAX);
          DCHECK_EQ(out.AsRegisterPairHigh<Register>(), EDX);
          DCHECK_EQ(in.As<Register>(), EAX);
          __ cdq();
          break;

        case Primitive::kPrimFloat:
        case Primitive::kPrimDouble:
          LOG(FATAL) << "Type conversion from " << input_type << " to "
                     << result_type << " not yet implemented";
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Type conversion from " << input_type
                 << " to " << result_type << " not yet implemented";
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1450
// Additions are two-address on x86: the output reuses the first input.
void LocationsBuilderX86::VisitAdd(HAdd* add) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;
  }
}
1476
void InstructionCodeGeneratorX86::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  // Two-address form: the result is accumulated into the first input.
  DCHECK(first.Equals(locations->Out()));
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        __ addl(first.As<Register>(), second.As<Register>());
      } else if (second.IsConstant()) {
        __ addl(first.As<Register>(), Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ addl(first.As<Register>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit add: addl on the low words, then adcl to propagate the carry
      // into the high words.
      if (second.IsRegisterPair()) {
        __ addl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
        __ adcl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
      } else {
        // NOTE(review): this branch assumes a double stack slot; the builder
        // allows Any() -- confirm long constants cannot reach here.
        __ addl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
        __ adcl(first.AsRegisterPairHigh<Register>(),
                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ addss(first.As<XmmRegister>(), second.As<XmmRegister>());
      } else {
        __ addss(first.As<XmmRegister>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ addsd(first.As<XmmRegister>(), second.As<XmmRegister>());
      } else {
        __ addsd(first.As<XmmRegister>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
1528
// Subtractions are two-address on x86: the output reuses the first input.
// Unlike VisitAdd, floating-point operands both require registers (the
// code generator only emits the register-register subss/subsd forms).
void LocationsBuilderX86::VisitSub(HSub* sub) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
1552
void InstructionCodeGeneratorX86::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  // Two-address form: the result is accumulated into the first input.
  DCHECK(first.Equals(locations->Out()));
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        __ subl(first.As<Register>(), second.As<Register>());
      } else if (second.IsConstant()) {
        __ subl(first.As<Register>(), Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ subl(first.As<Register>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit sub: subl on the low words, then sbbl to propagate the
      // borrow into the high words.
      if (second.IsRegisterPair()) {
        __ subl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
        __ sbbl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
      } else {
        // NOTE(review): this branch assumes a double stack slot; the builder
        // allows Any() -- confirm long constants cannot reach here.
        __ subl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
        __ sbbl(first.AsRegisterPairHigh<Register>(),
                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // The builder pinned both operands to FPU registers.
      __ subss(first.As<XmmRegister>(), second.As<XmmRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ subsd(first.As<XmmRegister>(), second.As<XmmRegister>());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
1596
void LocationsBuilderX86::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
      // Two-address imull: the output reuses the first input.
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      // TODO: Currently this handles only stack operands:
      // - we don't have enough registers because we currently use Quick ABI.
      // - by the time we have a working register allocator we will probably change the ABI
      // and fix the above.
      // - we don't have a way yet to request operands on stack but the base line compiler
      // will leave the operands on the stack with Any().
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      // Needed for imul on 32bits with 64bits output.
      // mull implicitly uses the EAX:EDX pair, so reserve both as temps.
      locations->AddTemp(Location::RegisterLocation(EAX));
      locations->AddTemp(Location::RegisterLocation(EDX));
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
1633
// Emits the multiply for HMul. The builder constrained the output to be the
// same location as the first input, so every case computes in place.
void InstructionCodeGeneratorX86::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      // 32-bit imul accepts a register, immediate or memory second operand.
      if (second.IsRegister()) {
        __ imull(first.As<Register>(), second.As<Register>());
      } else if (second.IsConstant()) {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
        __ imull(first.As<Register>(), imm);
      } else {
        DCHECK(second.IsStackSlot());
        __ imull(first.As<Register>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      // The builder only allows the second operand on the stack for now.
      DCHECK(second.IsDoubleStackSlot());

      Register in1_hi = first.AsRegisterPairHigh<Register>();
      Register in1_lo = first.AsRegisterPairLow<Register>();
      Address in2_hi(ESP, second.GetHighStackIndex(kX86WordSize));
      Address in2_lo(ESP, second.GetStackIndex());
      // EAX/EDX were reserved as temps by the builder because one-operand
      // mull below implicitly writes the edx:eax pair.
      Register eax = locations->GetTemp(0).As<Register>();
      Register edx = locations->GetTemp(1).As<Register>();

      DCHECK_EQ(EAX, eax);
      DCHECK_EQ(EDX, edx);

      // input: in1 - 64 bits, in2 - 64 bits
      // output: in1
      // formula: in1.hi : in1.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
      // parts: in1.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: in1.lo = (in1.lo * in2.lo)[31:0]

      __ movl(eax, in2_hi);
      // eax <- in1.lo * in2.hi
      __ imull(eax, in1_lo);
      // in1.hi <- in1.hi * in2.lo
      __ imull(in1_hi, in2_lo);
      // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ addl(in1_hi, eax);
      // move in1_lo to eax to prepare for double precision
      __ movl(eax, in1_lo);
      // edx:eax <- in1.lo * in2.lo
      __ mull(in2_lo);
      // in1.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ addl(in1_hi, edx);
      // in1.lo <- (in1.lo * in2.lo)[31:0];
      __ movl(in1_lo, eax);

      break;
    }

    case Primitive::kPrimFloat: {
      // Single-precision multiply, in place on the first operand.
      __ mulss(first.As<XmmRegister>(), second.As<XmmRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      // Double-precision multiply, in place on the first operand.
      __ mulsd(first.As<XmmRegister>(), second.As<XmmRegister>());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
1706
1707void LocationsBuilderX86::VisitDiv(HDiv* div) {
1708  LocationSummary::CallKind call_kind = div->GetResultType() == Primitive::kPrimLong
1709      ? LocationSummary::kCall
1710      : LocationSummary::kNoCall;
1711  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
1712
1713  switch (div->GetResultType()) {
1714    case Primitive::kPrimInt: {
1715      locations->SetInAt(0, Location::RegisterLocation(EAX));
1716      locations->SetInAt(1, Location::RequiresRegister());
1717      locations->SetOut(Location::SameAsFirstInput());
1718      // Intel uses edx:eax as the dividend.
1719      locations->AddTemp(Location::RegisterLocation(EDX));
1720      break;
1721    }
1722    case Primitive::kPrimLong: {
1723      InvokeRuntimeCallingConvention calling_convention;
1724      locations->SetInAt(0, Location::RegisterPairLocation(
1725          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
1726      locations->SetInAt(1, Location::RegisterPairLocation(
1727          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
1728      // Runtime helper puts the result in EAX, EDX.
1729      locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
1730      break;
1731    }
1732    case Primitive::kPrimFloat:
1733    case Primitive::kPrimDouble: {
1734      locations->SetInAt(0, Location::RequiresFpuRegister());
1735      locations->SetInAt(1, Location::RequiresFpuRegister());
1736      locations->SetOut(Location::SameAsFirstInput());
1737      break;
1738    }
1739
1740    default:
1741      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1742  }
1743}
1744
// Emits the division for HDiv. Int division is inlined with idiv (with a
// slow path guarding the 0x80000000 / -1 overflow case); long division
// calls the pLdiv runtime helper; float/double use divss/divsd in place.
void InstructionCodeGeneratorX86::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  switch (div->GetResultType()) {
    case Primitive::kPrimInt: {
      DCHECK(first.Equals(out));
      Register first_reg = first.As<Register>();
      Register second_reg = second.As<Register>();
      // The builder pinned the dividend to EAX and reserved EDX.
      DCHECK_EQ(EAX, first_reg);
      DCHECK_EQ(EDX, locations->GetTemp(0).As<Register>());

      SlowPathCodeX86* slow_path =
          new (GetGraph()->GetArena()) DivMinusOneSlowPathX86(first_reg);
      codegen_->AddSlowPath(slow_path);

      // 0x80000000/-1 triggers an arithmetic exception!
      // Dividing by -1 is actually negation and -0x80000000 = 0x80000000 so
      // it's safe to just use negl instead of more complex comparisons.

      // Divisor of -1 is diverted to the slow path, which negates instead.
      __ cmpl(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());

      // edx:eax <- sign-extended of eax
      __ cdq();
      // eax = quotient, edx = remainder
      __ idivl(second_reg);

      __ Bind(slow_path->GetExitLabel());
      break;
    }

    case Primitive::kPrimLong: {
      // The builder placed both operands per the runtime calling convention
      // and the helper returns the result in EDX:EAX.
      InvokeRuntimeCallingConvention calling_convention;
      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
      DCHECK_EQ(EAX, out.AsRegisterPairLow<Register>());
      DCHECK_EQ(EDX, out.AsRegisterPairHigh<Register>());

      __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pLdiv)));
      codegen_->RecordPcInfo(div, div->GetDexPc());

      break;
    }

    case Primitive::kPrimFloat: {
      DCHECK(first.Equals(out));
      __ divss(first.As<XmmRegister>(), second.As<XmmRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      DCHECK(first.Equals(out));
      __ divsd(first.As<XmmRegister>(), second.As<XmmRegister>());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
1810
1811void LocationsBuilderX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1812  LocationSummary* locations =
1813      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1814  switch (instruction->GetType()) {
1815    case Primitive::kPrimInt: {
1816      locations->SetInAt(0, Location::Any());
1817      break;
1818    }
1819    case Primitive::kPrimLong: {
1820      locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
1821      if (!instruction->IsConstant()) {
1822        locations->AddTemp(Location::RequiresRegister());
1823      }
1824      break;
1825    }
1826    default:
1827      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
1828  }
1829  if (instruction->HasUses()) {
1830    locations->SetOut(Location::SameAsFirstInput());
1831  }
1832}
1833
1834void InstructionCodeGeneratorX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1835  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86(instruction);
1836  codegen_->AddSlowPath(slow_path);
1837
1838  LocationSummary* locations = instruction->GetLocations();
1839  Location value = locations->InAt(0);
1840
1841  switch (instruction->GetType()) {
1842    case Primitive::kPrimInt: {
1843      if (value.IsRegister()) {
1844        __ testl(value.As<Register>(), value.As<Register>());
1845        __ j(kEqual, slow_path->GetEntryLabel());
1846      } else if (value.IsStackSlot()) {
1847        __ cmpl(Address(ESP, value.GetStackIndex()), Immediate(0));
1848        __ j(kEqual, slow_path->GetEntryLabel());
1849      } else {
1850        DCHECK(value.IsConstant()) << value;
1851        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
1852        __ jmp(slow_path->GetEntryLabel());
1853        }
1854      }
1855      break;
1856    }
1857    case Primitive::kPrimLong: {
1858      if (value.IsRegisterPair()) {
1859        Register temp = locations->GetTemp(0).As<Register>();
1860        __ movl(temp, value.AsRegisterPairLow<Register>());
1861        __ orl(temp, value.AsRegisterPairHigh<Register>());
1862        __ j(kEqual, slow_path->GetEntryLabel());
1863      } else {
1864        DCHECK(value.IsConstant()) << value;
1865        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
1866          __ jmp(slow_path->GetEntryLabel());
1867        }
1868      }
1869      break;
1870    }
1871    default:
1872      LOG(FATAL) << "Unexpected type for HDivZeroCheck" << instruction->GetType();
1873  }
1874}
1875
1876void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
1877  LocationSummary* locations =
1878      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1879  locations->SetOut(Location::RegisterLocation(EAX));
1880  InvokeRuntimeCallingConvention calling_convention;
1881  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1882  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1883}
1884
// Emits the runtime call that allocates a new object instance.
void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  // pAllocObjectWithAccessCheck arguments: type index in the first argument
  // register, the referring ArtMethod in the second.
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));

  __ fs()->call(
      Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocObjectWithAccessCheck)));

  // Map this call site back to its dex pc for the runtime.
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}
1896
1897void LocationsBuilderX86::VisitNewArray(HNewArray* instruction) {
1898  LocationSummary* locations =
1899      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1900  locations->SetOut(Location::RegisterLocation(EAX));
1901  InvokeRuntimeCallingConvention calling_convention;
1902  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1903  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1904  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1905}
1906
// Emits the runtime call that allocates a new array. The array length was
// already routed into the third argument register by the locations builder.
void InstructionCodeGeneratorX86::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  // pAllocArrayWithAccessCheck arguments: type index in the first argument
  // register, the referring ArtMethod in the second.
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));

  __ fs()->call(
      Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocArrayWithAccessCheck)));

  // Map this call site back to its dex pc for the runtime.
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}
1918
1919void LocationsBuilderX86::VisitParameterValue(HParameterValue* instruction) {
1920  LocationSummary* locations =
1921      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1922  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
1923  if (location.IsStackSlot()) {
1924    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1925  } else if (location.IsDoubleStackSlot()) {
1926    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1927  }
1928  locations->SetOut(location);
1929}
1930
void InstructionCodeGeneratorX86::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to emit: the parameter already lives in the location chosen by
  // the locations builder (a register or the caller's stack slot).
  UNUSED(instruction);
}
1934
1935void LocationsBuilderX86::VisitNot(HNot* not_) {
1936  LocationSummary* locations =
1937      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
1938  locations->SetInAt(0, Location::RequiresRegister());
1939  locations->SetOut(Location::SameAsFirstInput());
1940}
1941
// Emits the logical/bitwise not, computed in place on the input location.
void InstructionCodeGeneratorX86::VisitNot(HNot* not_) {
  LocationSummary* locations = not_->GetLocations();
  Location in = locations->InAt(0);
  Location out = locations->Out();
  DCHECK(in.Equals(out));  // SameAsFirstInput: in-place operation.
  switch (not_->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
      // Boolean not: flip the low bit with xor.
      __ xorl(out.As<Register>(), Immediate(1));
      break;

    case Primitive::kPrimInt:
      __ notl(out.As<Register>());
      break;

    case Primitive::kPrimLong:
      // 64-bit not: complement both halves of the register pair.
      __ notl(out.AsRegisterPairLow<Register>());
      __ notl(out.AsRegisterPairHigh<Register>());
      break;

    default:
      // NOTE(review): the switch dispatches on the input type but the message
      // streams GetResultType() — presumably identical for HNot; confirm.
      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
  }
}
1965
1966void LocationsBuilderX86::VisitCompare(HCompare* compare) {
1967  LocationSummary* locations =
1968      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
1969  locations->SetInAt(0, Location::RequiresRegister());
1970  locations->SetInAt(1, Location::Any());
1971  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1972}
1973
1974void InstructionCodeGeneratorX86::VisitCompare(HCompare* compare) {
1975  LocationSummary* locations = compare->GetLocations();
1976  switch (compare->InputAt(0)->GetType()) {
1977    case Primitive::kPrimLong: {
1978      Label less, greater, done;
1979      Register output = locations->Out().As<Register>();
1980      Location left = locations->InAt(0);
1981      Location right = locations->InAt(1);
1982      if (right.IsRegister()) {
1983        __ cmpl(left.AsRegisterPairHigh<Register>(), right.AsRegisterPairHigh<Register>());
1984      } else {
1985        DCHECK(right.IsDoubleStackSlot());
1986        __ cmpl(left.AsRegisterPairHigh<Register>(),
1987                Address(ESP, right.GetHighStackIndex(kX86WordSize)));
1988      }
1989      __ j(kLess, &less);  // Signed compare.
1990      __ j(kGreater, &greater);  // Signed compare.
1991      if (right.IsRegisterPair()) {
1992        __ cmpl(left.AsRegisterPairLow<Register>(), right.AsRegisterPairLow<Register>());
1993      } else {
1994        DCHECK(right.IsDoubleStackSlot());
1995        __ cmpl(left.AsRegisterPairLow<Register>(), Address(ESP, right.GetStackIndex()));
1996      }
1997      __ movl(output, Immediate(0));
1998      __ j(kEqual, &done);
1999      __ j(kBelow, &less);  // Unsigned compare.
2000
2001      __ Bind(&greater);
2002      __ movl(output, Immediate(1));
2003      __ jmp(&done);
2004
2005      __ Bind(&less);
2006      __ movl(output, Immediate(-1));
2007
2008      __ Bind(&done);
2009      break;
2010    }
2011    default:
2012      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
2013  }
2014}
2015
2016void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
2017  LocationSummary* locations =
2018      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2019  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2020    locations->SetInAt(i, Location::Any());
2021  }
2022  locations->SetOut(Location::Any());
2023}
2024
void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction) {
  // Phis never generate code themselves; reaching this visitor during code
  // generation is a compiler bug.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
2029
2030void LocationsBuilderX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
2031  LocationSummary* locations =
2032      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2033  locations->SetInAt(0, Location::RequiresRegister());
2034  Primitive::Type field_type = instruction->GetFieldType();
2035  bool is_object_type = field_type == Primitive::kPrimNot;
2036  bool is_byte_type = (field_type == Primitive::kPrimBoolean)
2037      || (field_type == Primitive::kPrimByte);
2038  // The register allocator does not support multiple
2039  // inputs that die at entry with one in a specific register.
2040  if (is_byte_type) {
2041    // Ensure the value is in a byte register.
2042    locations->SetInAt(1, Location::RegisterLocation(EAX));
2043  } else {
2044    locations->SetInAt(1, Location::RequiresRegister());
2045  }
2046  // Temporary registers for the write barrier.
2047  if (is_object_type) {
2048    locations->AddTemp(Location::RequiresRegister());
2049    // Ensure the card is in a byte register.
2050    locations->AddTemp(Location::RegisterLocation(ECX));
2051  }
2052}
2053
// Emits the store for an instance-field write, sized by the field type, and
// the GC write barrier when the stored value is a reference.
void InstructionCodeGeneratorX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
  Primitive::Type field_type = instruction->GetFieldType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      // The builder pinned the value to a byte-addressable register.
      ByteRegister value = locations->InAt(1).As<ByteRegister>();
      __ movb(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      // 16-bit store.
      Register value = locations->InAt(1).As<Register>();
      __ movw(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register value = locations->InAt(1).As<Register>();
      __ movl(Address(obj, offset), value);

      if (field_type == Primitive::kPrimNot) {
        // Reference store: dirty the card covering `obj` so the GC sees
        // the new reference (temps reserved by the locations builder).
        Register temp = locations->GetTemp(0).As<Register>();
        Register card = locations->GetTemp(1).As<Register>();
        codegen_->MarkGCCard(temp, card, obj, value);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit store as two 32-bit moves (low word first).
      // NOTE(review): not a single atomic store — presumably volatile long
      // fields are handled elsewhere; confirm (the get path has a TODO).
      Location value = locations->InAt(1);
      __ movl(Address(obj, offset), value.AsRegisterPairLow<Register>());
      __ movl(Address(obj, kX86WordSize + offset), value.AsRegisterPairHigh<Register>());
      break;
    }

    case Primitive::kPrimFloat: {
      XmmRegister value = locations->InAt(1).As<XmmRegister>();
      __ movss(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimDouble: {
      XmmRegister value = locations->InAt(1).As<XmmRegister>();
      __ movsd(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }
}
2112
// GC write barrier: dirties the card-table entry covering `object` after
// `value` was stored into it. Skipped entirely for null stores.
// `temp` and `card` are scratch registers; `card` must be byte-addressable.
void CodeGeneratorX86::MarkGCCard(Register temp, Register card, Register object, Register value) {
  Label is_null;
  // Storing null creates no old-to-young reference: skip the barrier.
  __ testl(value, value);
  __ j(kEqual, &is_null);
  // Load the thread-local card table base (through the fs segment).
  __ fs()->movl(card, Address::Absolute(Thread::CardTableOffset<kX86WordSize>().Int32Value()));
  // Index the table with the object address shifted by the card size.
  __ movl(temp, object);
  __ shrl(temp, Immediate(gc::accounting::CardTable::kCardShift));
  // Store the low byte of the card base as the dirty value — NOTE(review):
  // this relies on the card table's biased-base convention; see
  // gc/accounting/card_table.h to confirm.
  __ movb(Address(temp, card, TIMES_1, 0),
          X86ManagedRegister::FromCpuRegister(card).AsByteRegister());
  __ Bind(&is_null);
}
2124
2125void LocationsBuilderX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
2126  LocationSummary* locations =
2127      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2128  locations->SetInAt(0, Location::RequiresRegister());
2129  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2130}
2131
// Emits the load for an instance-field read, sized and extended according
// to the field type (zero-extend unsigned types, sign-extend signed ones).
void InstructionCodeGeneratorX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      // Zero-extend the unsigned 8-bit value.
      Register out = locations->Out().As<Register>();
      __ movzxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimByte: {
      // Sign-extend the signed 8-bit value.
      Register out = locations->Out().As<Register>();
      __ movsxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimShort: {
      // Sign-extend the signed 16-bit value.
      Register out = locations->Out().As<Register>();
      __ movsxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimChar: {
      // Zero-extend the unsigned 16-bit value.
      Register out = locations->Out().As<Register>();
      __ movzxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register out = locations->Out().As<Register>();
      __ movl(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimLong: {
      // TODO: support volatile.
      // 64-bit load as two 32-bit moves (low word first).
      __ movl(locations->Out().AsRegisterPairLow<Register>(), Address(obj, offset));
      __ movl(locations->Out().AsRegisterPairHigh<Register>(), Address(obj, kX86WordSize + offset));
      break;
    }

    case Primitive::kPrimFloat: {
      XmmRegister out = locations->Out().As<XmmRegister>();
      __ movss(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimDouble: {
      XmmRegister out = locations->Out().As<XmmRegister>();
      __ movsd(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2193
2194void LocationsBuilderX86::VisitNullCheck(HNullCheck* instruction) {
2195  LocationSummary* locations =
2196      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2197  locations->SetInAt(0, Location::Any());
2198  if (instruction->HasUses()) {
2199    locations->SetOut(Location::SameAsFirstInput());
2200  }
2201}
2202
2203void InstructionCodeGeneratorX86::VisitNullCheck(HNullCheck* instruction) {
2204  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86(instruction);
2205  codegen_->AddSlowPath(slow_path);
2206
2207  LocationSummary* locations = instruction->GetLocations();
2208  Location obj = locations->InAt(0);
2209
2210  if (obj.IsRegister()) {
2211    __ cmpl(obj.As<Register>(), Immediate(0));
2212  } else if (obj.IsStackSlot()) {
2213    __ cmpl(Address(ESP, obj.GetStackIndex()), Immediate(0));
2214  } else {
2215    DCHECK(obj.IsConstant()) << obj;
2216    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
2217    __ jmp(slow_path->GetEntryLabel());
2218    return;
2219  }
2220  __ j(kEqual, slow_path->GetEntryLabel());
2221}
2222
2223void LocationsBuilderX86::VisitArrayGet(HArrayGet* instruction) {
2224  LocationSummary* locations =
2225      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2226  locations->SetInAt(0, Location::RequiresRegister());
2227  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2228  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2229}
2230
// Emits the load for an array element. A constant index is folded into the
// displacement; a register index uses scaled addressing. Narrow element
// types are zero-/sign-extended to 32 bits like the field-get path.
void InstructionCodeGeneratorX86::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      // Unsigned 8-bit element: zero-extend.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        __ movzxb(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
      } else {
        __ movzxb(out, Address(obj, index.As<Register>(), TIMES_1, data_offset));
      }
      break;
    }

    case Primitive::kPrimByte: {
      // Signed 8-bit element: sign-extend.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        __ movsxb(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
      } else {
        __ movsxb(out, Address(obj, index.As<Register>(), TIMES_1, data_offset));
      }
      break;
    }

    case Primitive::kPrimShort: {
      // Signed 16-bit element: sign-extend.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        __ movsxw(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
      } else {
        __ movsxw(out, Address(obj, index.As<Register>(), TIMES_2, data_offset));
      }
      break;
    }

    case Primitive::kPrimChar: {
      // Unsigned 16-bit element: zero-extend.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        __ movzxw(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
      } else {
        __ movzxw(out, Address(obj, index.As<Register>(), TIMES_2, data_offset));
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // 32-bit element (int or reference).
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        __ movl(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
      } else {
        __ movl(out, Address(obj, index.As<Register>(), TIMES_4, data_offset));
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit element loaded as two 32-bit moves (low word first).
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ movl(out.AsRegisterPairLow<Register>(), Address(obj, offset));
        __ movl(out.AsRegisterPairHigh<Register>(), Address(obj, offset + kX86WordSize));
      } else {
        __ movl(out.AsRegisterPairLow<Register>(),
                Address(obj, index.As<Register>(), TIMES_8, data_offset));
        __ movl(out.AsRegisterPairHigh<Register>(),
                Address(obj, index.As<Register>(), TIMES_8, data_offset + kX86WordSize));
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
      UNREACHABLE();
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2323
2324void LocationsBuilderX86::VisitArraySet(HArraySet* instruction) {
2325  Primitive::Type value_type = instruction->GetComponentType();
2326  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2327      instruction,
2328      value_type == Primitive::kPrimNot ? LocationSummary::kCall : LocationSummary::kNoCall);
2329
2330  if (value_type == Primitive::kPrimNot) {
2331    InvokeRuntimeCallingConvention calling_convention;
2332    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2333    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2334    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2335  } else {
2336    bool is_byte_type = (value_type == Primitive::kPrimBoolean)
2337        || (value_type == Primitive::kPrimByte);
2338    // We need the inputs to be different than the output in case of long operation.
2339    // In case of a byte operation, the register allocator does not support multiple
2340    // inputs that die at entry with one in a specific register.
2341    locations->SetInAt(0, Location::RequiresRegister());
2342    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2343    if (is_byte_type) {
2344      // Ensure the value is in a byte register.
2345      locations->SetInAt(2, Location::ByteRegisterOrConstant(EAX, instruction->InputAt(2)));
2346    } else {
2347      locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
2348    }
2349  }
2350}
2351
2352void InstructionCodeGeneratorX86::VisitArraySet(HArraySet* instruction) {
2353  LocationSummary* locations = instruction->GetLocations();
2354  Register obj = locations->InAt(0).As<Register>();
2355  Location index = locations->InAt(1);
2356  Location value = locations->InAt(2);
2357  Primitive::Type value_type = instruction->GetComponentType();
2358
2359  switch (value_type) {
2360    case Primitive::kPrimBoolean:
2361    case Primitive::kPrimByte: {
2362      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
2363      if (index.IsConstant()) {
2364        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2365        if (value.IsRegister()) {
2366          __ movb(Address(obj, offset), value.As<ByteRegister>());
2367        } else {
2368          __ movb(Address(obj, offset),
2369                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2370        }
2371      } else {
2372        if (value.IsRegister()) {
2373          __ movb(Address(obj, index.As<Register>(), TIMES_1, data_offset),
2374                  value.As<ByteRegister>());
2375        } else {
2376          __ movb(Address(obj, index.As<Register>(), TIMES_1, data_offset),
2377                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2378        }
2379      }
2380      break;
2381    }
2382
2383    case Primitive::kPrimShort:
2384    case Primitive::kPrimChar: {
2385      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
2386      if (index.IsConstant()) {
2387        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2388        if (value.IsRegister()) {
2389          __ movw(Address(obj, offset), value.As<Register>());
2390        } else {
2391          __ movw(Address(obj, offset),
2392                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2393        }
2394      } else {
2395        if (value.IsRegister()) {
2396          __ movw(Address(obj, index.As<Register>(), TIMES_2, data_offset),
2397                  value.As<Register>());
2398        } else {
2399          __ movw(Address(obj, index.As<Register>(), TIMES_2, data_offset),
2400                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2401        }
2402      }
2403      break;
2404    }
2405
2406    case Primitive::kPrimInt: {
2407      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2408      if (index.IsConstant()) {
2409        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2410        if (value.IsRegister()) {
2411          __ movl(Address(obj, offset), value.As<Register>());
2412        } else {
2413          __ movl(Address(obj, offset), Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2414        }
2415      } else {
2416        if (value.IsRegister()) {
2417          __ movl(Address(obj, index.As<Register>(), TIMES_4, data_offset),
2418                  value.As<Register>());
2419        } else {
2420          __ movl(Address(obj, index.As<Register>(), TIMES_4, data_offset),
2421                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2422        }
2423      }
2424      break;
2425    }
2426
2427    case Primitive::kPrimNot: {
2428      DCHECK(!codegen_->IsLeafMethod());
2429      __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAputObject)));
2430      codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
2431      break;
2432    }
2433
2434    case Primitive::kPrimLong: {
2435      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
2436      if (index.IsConstant()) {
2437        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
2438        if (value.IsRegisterPair()) {
2439          __ movl(Address(obj, offset), value.AsRegisterPairLow<Register>());
2440          __ movl(Address(obj, offset + kX86WordSize), value.AsRegisterPairHigh<Register>());
2441        } else {
2442          DCHECK(value.IsConstant());
2443          int64_t val = value.GetConstant()->AsLongConstant()->GetValue();
2444          __ movl(Address(obj, offset), Immediate(Low32Bits(val)));
2445          __ movl(Address(obj, offset + kX86WordSize), Immediate(High32Bits(val)));
2446        }
2447      } else {
2448        if (value.IsRegisterPair()) {
2449          __ movl(Address(obj, index.As<Register>(), TIMES_8, data_offset),
2450                  value.AsRegisterPairLow<Register>());
2451          __ movl(Address(obj, index.As<Register>(), TIMES_8, data_offset + kX86WordSize),
2452                  value.AsRegisterPairHigh<Register>());
2453        } else {
2454          DCHECK(value.IsConstant());
2455          int64_t val = value.GetConstant()->AsLongConstant()->GetValue();
2456          __ movl(Address(obj, index.As<Register>(), TIMES_8, data_offset),
2457                  Immediate(Low32Bits(val)));
2458          __ movl(Address(obj, index.As<Register>(), TIMES_8, data_offset + kX86WordSize),
2459                  Immediate(High32Bits(val)));
2460        }
2461      }
2462      break;
2463    }
2464
2465    case Primitive::kPrimFloat:
2466    case Primitive::kPrimDouble:
2467      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
2468      UNREACHABLE();
2469    case Primitive::kPrimVoid:
2470      LOG(FATAL) << "Unreachable type " << instruction->GetType();
2471      UNREACHABLE();
2472  }
2473}
2474
2475void LocationsBuilderX86::VisitArrayLength(HArrayLength* instruction) {
2476  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2477  locations->SetInAt(0, Location::RequiresRegister());
2478  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2479  instruction->SetLocations(locations);
2480}
2481
2482void InstructionCodeGeneratorX86::VisitArrayLength(HArrayLength* instruction) {
2483  LocationSummary* locations = instruction->GetLocations();
2484  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
2485  Register obj = locations->InAt(0).As<Register>();
2486  Register out = locations->Out().As<Register>();
2487  __ movl(out, Address(obj, offset));
2488}
2489
2490void LocationsBuilderX86::VisitBoundsCheck(HBoundsCheck* instruction) {
2491  LocationSummary* locations =
2492      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2493  locations->SetInAt(0, Location::RequiresRegister());
2494  locations->SetInAt(1, Location::RequiresRegister());
2495  if (instruction->HasUses()) {
2496    locations->SetOut(Location::SameAsFirstInput());
2497  }
2498}
2499
2500void InstructionCodeGeneratorX86::VisitBoundsCheck(HBoundsCheck* instruction) {
2501  LocationSummary* locations = instruction->GetLocations();
2502  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86(
2503      instruction, locations->InAt(0), locations->InAt(1));
2504  codegen_->AddSlowPath(slow_path);
2505
2506  Register index = locations->InAt(0).As<Register>();
2507  Register length = locations->InAt(1).As<Register>();
2508
2509  __ cmpl(index, length);
2510  __ j(kAboveEqual, slow_path->GetEntryLabel());
2511}
2512
void LocationsBuilderX86::VisitTemporary(HTemporary* temp) {
  // Temporaries carry no location summary of their own; their slots are
  // managed directly by the code generator.
  temp->SetLocations(nullptr);
}
2516
void InstructionCodeGeneratorX86::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}
2521
void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction) {
  // Parallel moves are created after location building (by the register
  // allocator), so the builder must never encounter one.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
2526
void InstructionCodeGeneratorX86::VisitParallelMove(HParallelMove* instruction) {
  // Delegate to the parallel-move resolver, which sequences the moves and
  // emits the native code (see ParallelMoveResolverX86 below).
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
2530
void LocationsBuilderX86::VisitSuspendCheck(HSuspendCheck* instruction) {
  // A suspend check needs no inputs or outputs, but may call the runtime via
  // its slow path.
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}
2534
2535void InstructionCodeGeneratorX86::VisitSuspendCheck(HSuspendCheck* instruction) {
2536  HBasicBlock* block = instruction->GetBlock();
2537  if (block->GetLoopInformation() != nullptr) {
2538    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
2539    // The back edge will generate the suspend check.
2540    return;
2541  }
2542  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
2543    // The goto will generate the suspend check.
2544    return;
2545  }
2546  GenerateSuspendCheck(instruction, nullptr);
2547}
2548
// Emits a suspend check: tests the thread-flags word in thread-local storage
// (via the fs segment) and diverts to a slow path when any flag is set.
// `successor` is null for a stand-alone check, or the block to continue with
// when the check is emitted at a back edge.
void InstructionCodeGeneratorX86::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathX86* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathX86(instruction, successor);
  codegen_->AddSlowPath(slow_path);
  __ fs()->cmpw(Address::Absolute(
      Thread::ThreadFlagsOffset<kX86WordSize>().Int32Value()), Immediate(0));
  if (successor == nullptr) {
    // Stand-alone check: take the slow path only if flags are set, and
    // resume here afterwards.
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // Back-edge check: flags clear -> jump straight to the successor;
    // otherwise fall into the slow path (which is expected to branch to the
    // successor when done — no return label is bound here).
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
2564
// The resolver emits through the code generator's assembler.
X86Assembler* ParallelMoveResolverX86::GetAssembler() const {
  return codegen_->GetAssembler();
}
2568
2569void ParallelMoveResolverX86::MoveMemoryToMemory(int dst, int src) {
2570  ScratchRegisterScope ensure_scratch(
2571      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
2572  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
2573  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, src + stack_offset));
2574  __ movl(Address(ESP, dst + stack_offset), static_cast<Register>(ensure_scratch.GetRegister()));
2575}
2576
2577void ParallelMoveResolverX86::EmitMove(size_t index) {
2578  MoveOperands* move = moves_.Get(index);
2579  Location source = move->GetSource();
2580  Location destination = move->GetDestination();
2581
2582  if (source.IsRegister()) {
2583    if (destination.IsRegister()) {
2584      __ movl(destination.As<Register>(), source.As<Register>());
2585    } else {
2586      DCHECK(destination.IsStackSlot());
2587      __ movl(Address(ESP, destination.GetStackIndex()), source.As<Register>());
2588    }
2589  } else if (source.IsStackSlot()) {
2590    if (destination.IsRegister()) {
2591      __ movl(destination.As<Register>(), Address(ESP, source.GetStackIndex()));
2592    } else {
2593      DCHECK(destination.IsStackSlot());
2594      MoveMemoryToMemory(destination.GetStackIndex(),
2595                         source.GetStackIndex());
2596    }
2597  } else if (source.IsConstant()) {
2598    HIntConstant* instruction = source.GetConstant()->AsIntConstant();
2599    Immediate imm(instruction->AsIntConstant()->GetValue());
2600    if (destination.IsRegister()) {
2601      __ movl(destination.As<Register>(), imm);
2602    } else {
2603      __ movl(Address(ESP, destination.GetStackIndex()), imm);
2604    }
2605  } else {
2606    LOG(FATAL) << "Unimplemented";
2607  }
2608}
2609
2610void ParallelMoveResolverX86::Exchange(Register reg, int mem) {
2611  Register suggested_scratch = reg == EAX ? EBX : EAX;
2612  ScratchRegisterScope ensure_scratch(
2613      this, reg, suggested_scratch, codegen_->GetNumberOfCoreRegisters());
2614
2615  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
2616  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset));
2617  __ movl(Address(ESP, mem + stack_offset), reg);
2618  __ movl(reg, static_cast<Register>(ensure_scratch.GetRegister()));
2619}
2620
// Swaps two stack slots using two scratch registers.
void ParallelMoveResolverX86::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch1(
      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());

  // Make sure the second scratch differs from the first.
  Register suggested_scratch = ensure_scratch1.GetRegister() == EAX ? EBX : EAX;
  ScratchRegisterScope ensure_scratch2(
      this, ensure_scratch1.GetRegister(), suggested_scratch, codegen_->GetNumberOfCoreRegisters());

  // Each spilled scratch register pushes one word onto the stack, shifting
  // all ESP-relative offsets; accumulate both adjustments.
  int stack_offset = ensure_scratch1.IsSpilled() ? kX86WordSize : 0;
  stack_offset += ensure_scratch2.IsSpilled() ? kX86WordSize : 0;
  __ movl(static_cast<Register>(ensure_scratch1.GetRegister()), Address(ESP, mem1 + stack_offset));
  __ movl(static_cast<Register>(ensure_scratch2.GetRegister()), Address(ESP, mem2 + stack_offset));
  __ movl(Address(ESP, mem2 + stack_offset), static_cast<Register>(ensure_scratch1.GetRegister()));
  __ movl(Address(ESP, mem1 + stack_offset), static_cast<Register>(ensure_scratch2.GetRegister()));
}
2636
2637void ParallelMoveResolverX86::EmitSwap(size_t index) {
2638  MoveOperands* move = moves_.Get(index);
2639  Location source = move->GetSource();
2640  Location destination = move->GetDestination();
2641
2642  if (source.IsRegister() && destination.IsRegister()) {
2643    __ xchgl(destination.As<Register>(), source.As<Register>());
2644  } else if (source.IsRegister() && destination.IsStackSlot()) {
2645    Exchange(source.As<Register>(), destination.GetStackIndex());
2646  } else if (source.IsStackSlot() && destination.IsRegister()) {
2647    Exchange(destination.As<Register>(), source.GetStackIndex());
2648  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
2649    Exchange(destination.GetStackIndex(), source.GetStackIndex());
2650  } else {
2651    LOG(FATAL) << "Unimplemented";
2652  }
2653}
2654
// Saves a scratch register on the stack before the resolver clobbers it.
void ParallelMoveResolverX86::SpillScratch(int reg) {
  __ pushl(static_cast<Register>(reg));
}
2658
// Restores a scratch register previously saved by SpillScratch().
void ParallelMoveResolverX86::RestoreScratch(int reg) {
  __ popl(static_cast<Register>(reg));
}
2662
2663void LocationsBuilderX86::VisitLoadClass(HLoadClass* cls) {
2664  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
2665      ? LocationSummary::kCallOnSlowPath
2666      : LocationSummary::kNoCall;
2667  LocationSummary* locations =
2668      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
2669  locations->SetOut(Location::RequiresRegister());
2670}
2671
void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) {
  Register out = cls->GetLocations()->Out().As<Register>();
  if (cls->IsReferrersClass()) {
    // Fast case: the class is the current method's declaring class; load it
    // straight off the ArtMethod. No runtime call can be needed.
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    codegen_->LoadCurrentMethod(out);
    __ movl(out, Address(out, mirror::ArtMethod::DeclaringClassOffset().Int32Value()));
  } else {
    // General case: look the class up in the current method's dex cache of
    // resolved types.
    DCHECK(cls->CanCallRuntime());
    codegen_->LoadCurrentMethod(out);
    __ movl(out, Address(out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value()));
    __ movl(out, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));

    // A null cache entry means the class is unresolved: fall into the slow
    // path, which also runs <clinit> checks when requested.
    SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    __ testl(out, out);
    __ j(kEqual, slow_path->GetEntryLabel());
    if (cls->MustGenerateClinitCheck()) {
      // The initialization check binds the slow path's exit label itself.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
2697
2698void LocationsBuilderX86::VisitClinitCheck(HClinitCheck* check) {
2699  LocationSummary* locations =
2700      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2701  locations->SetInAt(0, Location::RequiresRegister());
2702  if (check->HasUses()) {
2703    locations->SetOut(Location::SameAsFirstInput());
2704  }
2705}
2706
2707void InstructionCodeGeneratorX86::VisitClinitCheck(HClinitCheck* check) {
2708  // We assume the class to not be null.
2709  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86(
2710      check->GetLoadClass(), check, check->GetDexPc(), true);
2711  codegen_->AddSlowPath(slow_path);
2712  GenerateClassInitializationCheck(slow_path, check->GetLocations()->InAt(0).As<Register>());
2713}
2714
2715void InstructionCodeGeneratorX86::GenerateClassInitializationCheck(
2716    SlowPathCodeX86* slow_path, Register class_reg) {
2717  __ cmpl(Address(class_reg,  mirror::Class::StatusOffset().Int32Value()),
2718          Immediate(mirror::Class::kStatusInitialized));
2719  __ j(kLess, slow_path->GetEntryLabel());
2720  __ Bind(slow_path->GetExitLabel());
2721  // No need for memory fence, thanks to the X86 memory model.
2722}
2723
2724void LocationsBuilderX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2725  LocationSummary* locations =
2726      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2727  locations->SetInAt(0, Location::RequiresRegister());
2728  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2729}
2730
// Loads a static field of the class held in input 0, widening sub-word
// values into a full register according to the field's signedness.
void InstructionCodeGeneratorX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register cls = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      Register out = locations->Out().As<Register>();
      // Zero-extend the byte.
      __ movzxb(out, Address(cls, offset));
      break;
    }

    case Primitive::kPrimByte: {
      Register out = locations->Out().As<Register>();
      // Sign-extend the byte.
      __ movsxb(out, Address(cls, offset));
      break;
    }

    case Primitive::kPrimShort: {
      Register out = locations->Out().As<Register>();
      // Sign-extend the 16-bit value.
      __ movsxw(out, Address(cls, offset));
      break;
    }

    case Primitive::kPrimChar: {
      Register out = locations->Out().As<Register>();
      // Zero-extend the 16-bit value (char is unsigned).
      __ movzxw(out, Address(cls, offset));
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register out = locations->Out().As<Register>();
      __ movl(out, Address(cls, offset));
      break;
    }

    case Primitive::kPrimLong: {
      // TODO: support volatile.
      // Two 32-bit loads: low word first, then high word one word further.
      __ movl(locations->Out().AsRegisterPairLow<Register>(), Address(cls, offset));
      __ movl(locations->Out().AsRegisterPairHigh<Register>(), Address(cls, kX86WordSize + offset));
      break;
    }

    case Primitive::kPrimFloat: {
      XmmRegister out = locations->Out().As<XmmRegister>();
      __ movss(out, Address(cls, offset));
      break;
    }

    case Primitive::kPrimDouble: {
      XmmRegister out = locations->Out().As<XmmRegister>();
      __ movsd(out, Address(cls, offset));
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2792
2793void LocationsBuilderX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
2794  LocationSummary* locations =
2795      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2796  locations->SetInAt(0, Location::RequiresRegister());
2797  Primitive::Type field_type = instruction->GetFieldType();
2798  bool is_object_type = field_type == Primitive::kPrimNot;
2799  bool is_byte_type = (field_type == Primitive::kPrimBoolean)
2800      || (field_type == Primitive::kPrimByte);
2801  // The register allocator does not support multiple
2802  // inputs that die at entry with one in a specific register.
2803  if (is_byte_type) {
2804    // Ensure the value is in a byte register.
2805    locations->SetInAt(1, Location::RegisterLocation(EAX));
2806  } else {
2807    locations->SetInAt(1, Location::RequiresRegister());
2808  }
2809  // Temporary registers for the write barrier.
2810  if (is_object_type) {
2811    locations->AddTemp(Location::RequiresRegister());
2812    // Ensure the card is in a byte register.
2813    locations->AddTemp(Location::RegisterLocation(ECX));
2814  }
2815}
2816
// Stores a value into a static field of the class held in input 0, using the
// store width matching the field type; object stores also mark the GC card.
void InstructionCodeGeneratorX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register cls = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
  Primitive::Type field_type = instruction->GetFieldType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      // The builder pinned the value to EAX so it is addressable as a byte
      // register.
      ByteRegister value = locations->InAt(1).As<ByteRegister>();
      __ movb(Address(cls, offset), value);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      Register value = locations->InAt(1).As<Register>();
      __ movw(Address(cls, offset), value);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register value = locations->InAt(1).As<Register>();
      __ movl(Address(cls, offset), value);

      if (field_type == Primitive::kPrimNot) {
        // Reference store: mark the card so the GC sees the new reference.
        Register temp = locations->GetTemp(0).As<Register>();
        Register card = locations->GetTemp(1).As<Register>();
        codegen_->MarkGCCard(temp, card, cls, value);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // Two 32-bit stores: low word first, then high word one word further.
      Location value = locations->InAt(1);
      __ movl(Address(cls, offset), value.AsRegisterPairLow<Register>());
      __ movl(Address(cls, kX86WordSize + offset), value.AsRegisterPairHigh<Register>());
      break;
    }

    case Primitive::kPrimFloat: {
      XmmRegister value = locations->InAt(1).As<XmmRegister>();
      __ movss(Address(cls, offset), value);
      break;
    }

    case Primitive::kPrimDouble: {
      XmmRegister value = locations->InAt(1).As<XmmRegister>();
      __ movsd(Address(cls, offset), value);
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }
}
2875
2876void LocationsBuilderX86::VisitLoadString(HLoadString* load) {
2877  LocationSummary* locations =
2878      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
2879  locations->SetOut(Location::RequiresRegister());
2880}
2881
void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) {
  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86(load);
  codegen_->AddSlowPath(slow_path);

  // Look the string up in the current method's dex cache; a null entry means
  // it is unresolved and the slow path must resolve it.
  Register out = load->GetLocations()->Out().As<Register>();
  codegen_->LoadCurrentMethod(out);
  __ movl(out, Address(out, mirror::ArtMethod::DexCacheStringsOffset().Int32Value()));
  __ movl(out, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
  __ testl(out, out);
  __ j(kEqual, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
2894
2895void LocationsBuilderX86::VisitLoadException(HLoadException* load) {
2896  LocationSummary* locations =
2897      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
2898  locations->SetOut(Location::RequiresRegister());
2899}
2900
void InstructionCodeGeneratorX86::VisitLoadException(HLoadException* load) {
  // Read the pending exception from thread-local storage (fs segment) and
  // clear the slot so it is not delivered twice.
  Address address = Address::Absolute(Thread::ExceptionOffset<kX86WordSize>().Int32Value());
  __ fs()->movl(load->GetLocations()->Out().As<Register>(), address);
  __ fs()->movl(address, Immediate(0));
}
2906
2907void LocationsBuilderX86::VisitThrow(HThrow* instruction) {
2908  LocationSummary* locations =
2909      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2910  InvokeRuntimeCallingConvention calling_convention;
2911  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2912}
2913
void InstructionCodeGeneratorX86::VisitThrow(HThrow* instruction) {
  // Call the pDeliverException runtime entrypoint through thread-local
  // storage, and record the dex pc for stack-map/deopt purposes.
  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pDeliverException)));
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}
2918
2919void LocationsBuilderX86::VisitInstanceOf(HInstanceOf* instruction) {
2920  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
2921      ? LocationSummary::kNoCall
2922      : LocationSummary::kCallOnSlowPath;
2923  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
2924  locations->SetInAt(0, Location::RequiresRegister());
2925  locations->SetInAt(1, Location::Any());
2926  locations->SetOut(Location::RequiresRegister());
2927}
2928
// Materializes `obj instanceof cls` as 0/1 in the output register.
void InstructionCodeGeneratorX86::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  Location cls = locations->InAt(1);
  Register out = locations->Out().As<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Label done, zero;
  SlowPathCodeX86* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // TODO: avoid this check if we know obj is not null.
  __ testl(obj, obj);
  __ j(kEqual, &zero);
  // `out` temporarily holds obj's class pointer.
  __ movl(out, Address(obj, class_offset));
  // Compare the class of `obj` with `cls`.
  if (cls.IsRegister()) {
    __ cmpl(out, cls.As<Register>());
  } else {
    DCHECK(cls.IsStackSlot()) << cls;
    __ cmpl(out, Address(ESP, cls.GetStackIndex()));
  }

  if (instruction->IsClassFinal()) {
    // Classes must be equal for the instanceof to succeed.
    __ j(kNotEqual, &zero);
    __ movl(out, Immediate(1));
    __ jmp(&done);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86(
        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
    codegen_->AddSlowPath(slow_path);
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ movl(out, Immediate(1));
    __ jmp(&done);
  }
  __ Bind(&zero);
  __ movl(out, Immediate(0));
  if (slow_path != nullptr) {
    // The slow path rejoins here after writing its own result into `out`.
    __ Bind(slow_path->GetExitLabel());
  }
  __ Bind(&done);
}
2973
2974void LocationsBuilderX86::VisitCheckCast(HCheckCast* instruction) {
2975  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2976      instruction, LocationSummary::kCallOnSlowPath);
2977  locations->SetInAt(0, Location::RequiresRegister());
2978  locations->SetInAt(1, Location::Any());
2979  locations->AddTemp(Location::RequiresRegister());
2980}
2981
// Emits a checkcast: succeeds on a null object or an exact class match;
// anything else goes through the type-check slow path.
void InstructionCodeGeneratorX86::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  Location cls = locations->InAt(1);
  Register temp = locations->GetTemp(0).As<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86(
      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  // A null object trivially passes the cast: jump straight to the exit.
  // TODO: avoid this check if we know obj is not null.
  __ testl(obj, obj);
  __ j(kEqual, slow_path->GetExitLabel());
  __ movl(temp, Address(obj, class_offset));

  // Compare the class of `obj` with `cls`.
  if (cls.IsRegister()) {
    __ cmpl(temp, cls.As<Register>());
  } else {
    DCHECK(cls.IsStackSlot()) << cls;
    __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
  }

  __ j(kNotEqual, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
3008
3009void LocationsBuilderX86::VisitMonitorOperation(HMonitorOperation* instruction) {
3010  LocationSummary* locations =
3011      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3012  InvokeRuntimeCallingConvention calling_convention;
3013  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3014}
3015
void InstructionCodeGeneratorX86::VisitMonitorOperation(HMonitorOperation* instruction) {
  // Call pLockObject or pUnlockObject depending on enter vs. exit, and
  // record the dex pc for the runtime call.
  __ fs()->call(Address::Absolute(instruction->IsEnter()
        ? QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pLockObject)
        : QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pUnlockObject)));
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}
3022
// All three bitwise operations share one locations helper.
void LocationsBuilderX86::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderX86::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderX86::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
3026
3027void LocationsBuilderX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
3028  LocationSummary* locations =
3029      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3030  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
3031         || instruction->GetResultType() == Primitive::kPrimLong);
3032  locations->SetInAt(0, Location::RequiresRegister());
3033  locations->SetInAt(1, Location::Any());
3034  locations->SetOut(Location::SameAsFirstInput());
3035}
3036
// All three bitwise operations share one codegen helper.
void InstructionCodeGeneratorX86::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorX86::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorX86::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}
3048
// Emits and/or/xor for int and long results. The first operand is also the
// destination (two-operand x86 form); the second may be a register (pair),
// an int constant, or a stack slot. Longs are handled as two independent
// 32-bit operations on the low and high words.
void InstructionCodeGeneratorX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    if (second.IsRegister()) {
      if (instruction->IsAnd()) {
        __ andl(first.As<Register>(), second.As<Register>());
      } else if (instruction->IsOr()) {
        __ orl(first.As<Register>(), second.As<Register>());
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.As<Register>(), second.As<Register>());
      }
    } else if (second.IsConstant()) {
      if (instruction->IsAnd()) {
        __ andl(first.As<Register>(), Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
      } else if (instruction->IsOr()) {
        __ orl(first.As<Register>(), Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.As<Register>(), Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
      }
    } else {
      // Second operand lives on the stack.
      if (instruction->IsAnd()) {
        __ andl(first.As<Register>(), Address(ESP, second.GetStackIndex()));
      } else if (instruction->IsOr()) {
        __ orl(first.As<Register>(), Address(ESP, second.GetStackIndex()));
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.As<Register>(), Address(ESP, second.GetStackIndex()));
      }
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    if (second.IsRegisterPair()) {
      if (instruction->IsAnd()) {
        __ andl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
        __ andl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
        __ orl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
        __ xorl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
      }
    } else {
      // Second operand is a double stack slot: low word at GetStackIndex(),
      // high word one machine word above it.
      if (instruction->IsAnd()) {
        __ andl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
        __ andl(first.AsRegisterPairHigh<Register>(),
                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
        __ orl(first.AsRegisterPairHigh<Register>(),
                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
        __ xorl(first.AsRegisterPairHigh<Register>(),
                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
      }
    }
  }
}
3116
3117}  // namespace x86
3118}  // namespace art
3119