code_generator_x86.cc revision 981e45424f52735b1c61ae0eac7e299ed313f8db
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86.h"
18
19#include "entrypoints/quick/quick_entrypoints.h"
20#include "gc/accounting/card_table.h"
21#include "mirror/array-inl.h"
22#include "mirror/art_method.h"
23#include "mirror/class.h"
24#include "thread.h"
25#include "utils/assembler.h"
26#include "utils/stack_checks.h"
27#include "utils/x86/assembler_x86.h"
28#include "utils/x86/managed_register_x86.h"
29
30namespace art {
31
32namespace x86 {
33
// If true, frame entry emits an explicit ESP-vs-stack-end comparison with a
// slow path; if false, an implicit probe below the stack pointer is used
// (see GenerateFrameEntry).
static constexpr bool kExplicitStackOverflowCheck = false;

// Only the fake return-address register is accounted as pushed at entry
// (see GenerateFrameEntry); used in all frame-size computations.
static constexpr int kNumberOfPushedRegistersAtEntry = 1;
// Stack offset (from ESP after frame setup) where the current method is spilled.
static constexpr int kCurrentMethodStackOffset = 0;

// Core registers used to pass arguments on calls into the runtime.
static constexpr Register kRuntimeParameterCoreRegisters[] = { EAX, ECX, EDX, EBX };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
// No FPU registers are used for runtime calls on x86.
static constexpr XmmRegister kRuntimeParameterFpuRegisters[] = { };
// Literal 0 rather than arraysize(): presumably arraysize cannot be applied
// to an empty array — confirm against the arraysize definition.
static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;
44
// Calling convention used when calling into the runtime: the core/FPU
// register sets declared above (EAX, ECX, EDX, EBX; no FPU registers).
class InvokeRuntimeCallingConvention : public CallingConvention<Register, XmmRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
56
57#define __ reinterpret_cast<X86Assembler*>(codegen->GetAssembler())->
58
// Base class for x86 slow paths. Adds an entry label (bound at the start of
// the out-of-line code) and an exit label (target of the jump back to the
// fast path).
class SlowPathCodeX86 : public SlowPathCode {
 public:
  SlowPathCodeX86() : entry_label_(), exit_label_() {}

  Label* GetEntryLabel() { return &entry_label_; }
  Label* GetExitLabel() { return &exit_label_; }

 private:
  Label entry_label_;
  Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeX86);
};
72
// Slow path for HNullCheck: calls the pThrowNullPointer runtime entry point.
// No jump to the exit label is emitted, so the call is not expected to return.
class NullCheckSlowPathX86 : public SlowPathCodeX86 {
 public:
  explicit NullCheckSlowPathX86(HNullCheck* instruction) : instruction_(instruction) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    // Runtime entry points are reached via fixed offsets off the thread
    // register (FS segment on x86).
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowNullPointer)));
    // Record a PC mapping at the call, keyed by the instruction's dex PC.
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86);
};
87
// Slow path for HDivZeroCheck: calls the pThrowDivZero runtime entry point.
// No jump to the exit label is emitted, so the call is not expected to return.
class DivZeroCheckSlowPathX86 : public SlowPathCodeX86 {
 public:
  explicit DivZeroCheckSlowPathX86(HDivZeroCheck* instruction) : instruction_(instruction) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowDivZero)));
    // Record a PC mapping at the call, keyed by the instruction's dex PC.
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86);
};
102
103class DivMinusOneSlowPathX86 : public SlowPathCodeX86 {
104 public:
105  explicit DivMinusOneSlowPathX86(Register reg) : reg_(reg) {}
106
107  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
108    __ Bind(GetEntryLabel());
109    __ negl(reg_);
110    __ jmp(GetExitLabel());
111  }
112
113 private:
114  Register reg_;
115  DISALLOW_COPY_AND_ASSIGN(DivMinusOneSlowPathX86);
116};
117
// Slow path taken when the explicit stack overflow check fails. Pops the
// frame allocated by GenerateFrameEntry (same size computation), then
// tail-jumps to the pThrowStackOverflow entry point.
class StackOverflowCheckSlowPathX86 : public SlowPathCodeX86 {
 public:
  StackOverflowCheckSlowPathX86() {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    // Undo the frame adjustment done in GenerateFrameEntry.
    __ addl(ESP,
            Immediate(codegen->GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
    __ fs()->jmp(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowStackOverflow)));
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathX86);
};
132
// Slow path for HBoundsCheck: marshals the offending index and the array
// length into the first two runtime-call argument registers and calls
// pThrowArrayBounds. The call is not expected to return.
class BoundsCheckSlowPathX86 : public SlowPathCodeX86 {
 public:
  BoundsCheckSlowPathX86(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction), index_location_(index_location), length_location_(length_location) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    x86_codegen->EmitParallelMoves(
        index_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        length_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowArrayBounds)));
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  // Location of the out-of-range index.
  const Location index_location_;
  // Location of the array length it was checked against.
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86);
};
162
// Slow path for HSuspendCheck: saves the live registers, calls the
// pTestSuspend entry point, restores the registers, then either jumps back
// to the point of the check (successor_ == nullptr) or directly to
// `successor_` (back-edge case, see VisitGoto).
class SuspendCheckSlowPathX86 : public SlowPathCodeX86 {
 public:
  explicit SuspendCheckSlowPathX86(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    // The runtime call can suspend the thread; preserve live registers around it.
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pTestSuspend)));
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_codegen->GetLabelOf(successor_));
    }
  }

  // Resume point in the fast path; only valid when no explicit successor was given.
  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // Block to branch to after the check, or nullptr to fall back to return_label_.
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86);
};
194
// Slow path for HLoadString: calls the pResolveString entry point with
// (current method, string index) and moves the result (returned in EAX)
// to the instruction's output location.
class LoadStringSlowPathX86 : public SlowPathCodeX86 {
 public:
  explicit LoadStringSlowPathX86(HLoadString* instruction) : instruction_(instruction) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // The output register must not be live across the call, or the restore
    // below would clobber the result.
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Arguments: (current method, string index).
    InvokeRuntimeCallingConvention calling_convention;
    x86_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(0));
    __ movl(calling_convention.GetRegisterAt(1), Immediate(instruction_->GetStringIndex()));
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pResolveString)));
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
    x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
    codegen->RestoreLiveRegisters(locations);

    __ jmp(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86);
};
223
// Slow path for HLoadClass / HClinitCheck: calls pInitializeStaticStorage
// (when do_clinit_ is set) or pInitializeType, then moves the result
// (returned in EAX) to the output location if the instruction has one.
class LoadClassSlowPathX86 : public SlowPathCodeX86 {
 public:
  LoadClassSlowPathX86(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Arguments: (type index, current method).
    InvokeRuntimeCallingConvention calling_convention;
    __ movl(calling_convention.GetRegisterAt(0), Immediate(cls_->GetTypeIndex()));
    x86_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    __ fs()->call(Address::Absolute(do_clinit_
        ? QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pInitializeStaticStorage)
        : QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pInitializeType)));
    codegen->RecordPcInfo(at_, dex_pc_);

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      // The output register must not be live across the call, or the restore
      // below would clobber the result.
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_codegen->Move32(out, Location::RegisterLocation(EAX));
    }

    codegen->RestoreLiveRegisters(locations);
    __ jmp(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86);
};
275
// Slow path for HInstanceOf / HCheckCast: marshals the class to check and
// the object's class into the first two runtime argument registers and calls
// pInstanceofNonTrivial or pCheckCast. For instance-of, the result (EAX) is
// moved to the output location.
class TypeCheckSlowPathX86 : public SlowPathCodeX86 {
 public:
  TypeCheckSlowPathX86(HInstruction* instruction,
                       Location class_to_check,
                       Location object_class,
                       uint32_t dex_pc)
      : instruction_(instruction),
        class_to_check_(class_to_check),
        object_class_(object_class),
        dex_pc_(dex_pc) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // For instance-of, the output register must not be live across the call
    // (check-cast has no output, so the constraint does not apply).
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    x86_codegen->EmitParallelMoves(
        class_to_check_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        object_class_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));

    if (instruction_->IsInstanceOf()) {
      __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pInstanceofNonTrivial)));
    } else {
      DCHECK(instruction_->IsCheckCast());
      __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pCheckCast)));
    }

    codegen->RecordPcInfo(instruction_, dex_pc_);
    if (instruction_->IsInstanceOf()) {
      x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
    }
    codegen->RestoreLiveRegisters(locations);

    __ jmp(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  const Location class_to_check_;
  const Location object_class_;
  const uint32_t dex_pc_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86);
};
329
330#undef __
331#define __ reinterpret_cast<X86Assembler*>(GetAssembler())->
332
// Maps an IR comparison condition to the corresponding x86 condition code.
inline Condition X86Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  // Unreachable (LOG(FATAL) aborts); return only to satisfy the compiler.
  return kEqual;
}
346
347void CodeGeneratorX86::DumpCoreRegister(std::ostream& stream, int reg) const {
348  stream << X86ManagedRegister::FromCpuRegister(Register(reg));
349}
350
351void CodeGeneratorX86::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
352  stream << X86ManagedRegister::FromXmmRegister(XmmRegister(reg));
353}
354
355size_t CodeGeneratorX86::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
356  __ movl(Address(ESP, stack_index), static_cast<Register>(reg_id));
357  return kX86WordSize;
358}
359
360size_t CodeGeneratorX86::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
361  __ movl(static_cast<Register>(reg_id), Address(ESP, stack_index));
362  return kX86WordSize;
363}
364
// Constructs the x86 code generator for `graph`, wiring up the location
// builder, the instruction visitor and the parallel move resolver.
CodeGeneratorX86::CodeGeneratorX86(HGraph* graph)
    : CodeGenerator(graph, kNumberOfCpuRegisters, kNumberOfXmmRegisters, kNumberOfRegisterPairs),
      block_labels_(graph->GetArena(), 0),  // One label per basic block, arena-allocated.
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this) {}
371
372size_t CodeGeneratorX86::FrameEntrySpillSize() const {
373  return kNumberOfPushedRegistersAtEntry * kX86WordSize;
374}
375
// Finds a free register (or register pair for longs) suitable for `type`,
// marks it as blocked, and returns the corresponding Location.
Location CodeGeneratorX86::AllocateFreeRegister(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimLong: {
      // Longs need a pair; pick a free pair, then block both halves so they
      // cannot be handed out individually.
      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
      X86ManagedRegister pair =
          X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);
      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
      // Blocking the halves may invalidate other pairs too.
      UpdateBlockedPairRegisters();
      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register reg = static_cast<Register>(
          FindFreeEntry(blocked_core_registers_, kNumberOfCpuRegisters));
      // Block all register pairs that contain `reg`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        X86ManagedRegister current =
            X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs_[i] = true;
        }
      }
      return Location::RegisterLocation(reg);
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      // Both float and double fit in a single XMM register.
      return Location::FpuRegisterLocation(
          FindFreeEntry(blocked_fpu_registers_, kNumberOfXmmRegisters));
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  // Unreachable; return only to satisfy the compiler.
  return Location();
}
421
// Marks the registers the allocator must never hand out, then refreshes the
// pair-blocking table to stay consistent with the core-register table.
void CodeGeneratorX86::SetupBlockedRegisters() const {
  // Don't allocate the dalvik style register pair passing.
  blocked_register_pairs_[ECX_EDX] = true;

  // Stack register is always reserved.
  blocked_core_registers_[ESP] = true;

  // TODO: We currently don't use Quick's callee saved registers.
  blocked_core_registers_[EBP] = true;
  blocked_core_registers_[ESI] = true;
  blocked_core_registers_[EDI] = true;

  UpdateBlockedPairRegisters();
}
436
437void CodeGeneratorX86::UpdateBlockedPairRegisters() const {
438  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
439    X86ManagedRegister current =
440        X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
441    if (blocked_core_registers_[current.AsRegisterPairLow()]
442        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
443      blocked_register_pairs_[i] = true;
444    }
445  }
446}
447
// Caches the assembler and the owning code generator for use while visiting
// instructions.
InstructionCodeGeneratorX86::InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
452
// Emits the method prologue: stack overflow check (implicit probe or
// explicit comparison, unless skippable), frame allocation, and the spill of
// the current method to the bottom of the frame.
void CodeGeneratorX86::GenerateFrameEntry() {
  // Create a fake register to mimic Quick.
  static const int kFakeReturnRegister = 8;
  core_spill_mask_ |= (1 << kFakeReturnRegister);

  // Leaf methods whose frame is small enough need no overflow check.
  bool skip_overflow_check = IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86);
  if (!skip_overflow_check && !kExplicitStackOverflowCheck) {
    // Implicit check: load from below the stack pointer; presumably a fault
    // here is converted to a StackOverflowError by a runtime fault handler
    // outside this file — confirm.
    __ testl(EAX, Address(ESP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86))));
    RecordPcInfo(nullptr, 0);
  }

  // The return PC has already been pushed on the stack.
  __ subl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));

  if (!skip_overflow_check && kExplicitStackOverflowCheck) {
    // Explicit check: compare ESP against the thread's stack end and branch
    // to the slow path (which unwinds this frame) on overflow.
    SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathX86();
    AddSlowPath(slow_path);

    __ fs()->cmpl(ESP, Address::Absolute(Thread::StackEndOffset<kX86WordSize>()));
    __ j(kLess, slow_path->GetEntryLabel());
  }

  // Spill the current method (expected in EAX on entry — calling convention)
  // so LoadCurrentMethod can reload it from the frame.
  __ movl(Address(ESP, kCurrentMethodStackOffset), EAX);
}
477
478void CodeGeneratorX86::GenerateFrameExit() {
479  __ addl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
480}
481
482void CodeGeneratorX86::Bind(HBasicBlock* block) {
483  __ Bind(GetLabelOf(block));
484}
485
486void CodeGeneratorX86::LoadCurrentMethod(Register reg) {
487  __ movl(reg, Address(ESP, kCurrentMethodStackOffset));
488}
489
490Location CodeGeneratorX86::GetStackLocation(HLoadLocal* load) const {
491  switch (load->GetType()) {
492    case Primitive::kPrimLong:
493    case Primitive::kPrimDouble:
494      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
495      break;
496
497    case Primitive::kPrimInt:
498    case Primitive::kPrimNot:
499    case Primitive::kPrimFloat:
500      return Location::StackSlot(GetStackSlot(load->GetLocal()));
501
502    case Primitive::kPrimBoolean:
503    case Primitive::kPrimByte:
504    case Primitive::kPrimChar:
505    case Primitive::kPrimShort:
506    case Primitive::kPrimVoid:
507      LOG(FATAL) << "Unexpected type " << load->GetType();
508  }
509
510  LOG(FATAL) << "Unreachable";
511  return Location();
512}
513
// Computes the location of the next dex-convention argument of type `type`.
// 32-bit values consume one register (or stack slot once registers run out);
// 64-bit values consume two, and may be split between the last register and
// the stack (QuickParameter).
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble: {
      uint32_t index = gp_index_;
      gp_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        // Both halves fit in registers: hand out the pair.
        X86ManagedRegister pair = X86ManagedRegister::FromRegisterPair(
            calling_convention.GetRegisterPairAt(index));
        return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
        // On X86, the register index and stack index of a quick parameter is the same, since
        // we are passing floating pointer values in core registers.
        return Location::QuickParameter(index, index);
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  // Unreachable; return only to satisfy the compiler.
  return Location();
}
554
// Moves a 32-bit value between two locations (core register, XMM register,
// or stack slot). No-op when source equals destination.
void CodeGeneratorX86::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movl(destination.As<Register>(), source.As<Register>());
    } else if (source.IsFpuRegister()) {
      // XMM -> core register.
      __ movd(destination.As<Register>(), source.As<XmmRegister>());
    } else {
      DCHECK(source.IsStackSlot());
      __ movl(destination.As<Register>(), Address(ESP, source.GetStackIndex()));
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      // Core register -> XMM.
      __ movd(destination.As<XmmRegister>(), source.As<Register>());
    } else if (source.IsFpuRegister()) {
      __ movaps(destination.As<XmmRegister>(), source.As<XmmRegister>());
    } else {
      DCHECK(source.IsStackSlot());
      __ movss(destination.As<XmmRegister>(), Address(ESP, source.GetStackIndex()));
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      __ movl(Address(ESP, destination.GetStackIndex()), source.As<Register>());
    } else if (source.IsFpuRegister()) {
      __ movss(Address(ESP, destination.GetStackIndex()), source.As<XmmRegister>());
    } else {
      DCHECK(source.IsStackSlot());
      // Stack-to-stack move via push/pop: needs no scratch register.
      __ pushl(Address(ESP, source.GetStackIndex()));
      __ popl(Address(ESP, destination.GetStackIndex()));
    }
  }
}
590
591void CodeGeneratorX86::Move64(Location destination, Location source) {
592  if (source.Equals(destination)) {
593    return;
594  }
595  if (destination.IsRegisterPair()) {
596    if (source.IsRegisterPair()) {
597      __ movl(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
598      __ movl(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
599    } else if (source.IsFpuRegister()) {
600      LOG(FATAL) << "Unimplemented";
601    } else if (source.IsQuickParameter()) {
602      uint16_t register_index = source.GetQuickParameterRegisterIndex();
603      uint16_t stack_index = source.GetQuickParameterStackIndex();
604      InvokeDexCallingConvention calling_convention;
605      __ movl(destination.AsRegisterPairLow<Register>(),
606              calling_convention.GetRegisterAt(register_index));
607      __ movl(destination.AsRegisterPairHigh<Register>(), Address(ESP,
608          calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize()));
609    } else {
610      DCHECK(source.IsDoubleStackSlot());
611      __ movl(destination.AsRegisterPairLow<Register>(), Address(ESP, source.GetStackIndex()));
612      __ movl(destination.AsRegisterPairHigh<Register>(),
613              Address(ESP, source.GetHighStackIndex(kX86WordSize)));
614    }
615  } else if (destination.IsQuickParameter()) {
616    InvokeDexCallingConvention calling_convention;
617    uint16_t register_index = destination.GetQuickParameterRegisterIndex();
618    uint16_t stack_index = destination.GetQuickParameterStackIndex();
619    if (source.IsRegister()) {
620      __ movl(calling_convention.GetRegisterAt(register_index), source.AsRegisterPairLow<Register>());
621      __ movl(Address(ESP, calling_convention.GetStackOffsetOf(stack_index + 1)),
622              source.AsRegisterPairHigh<Register>());
623    } else if (source.IsFpuRegister()) {
624      LOG(FATAL) << "Unimplemented";
625    } else {
626      DCHECK(source.IsDoubleStackSlot());
627      __ movl(calling_convention.GetRegisterAt(register_index),
628              Address(ESP, source.GetStackIndex()));
629      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
630      __ popl(Address(ESP, calling_convention.GetStackOffsetOf(stack_index + 1)));
631    }
632  } else if (destination.IsFpuRegister()) {
633    if (source.IsDoubleStackSlot()) {
634      __ movsd(destination.As<XmmRegister>(), Address(ESP, source.GetStackIndex()));
635    } else {
636      LOG(FATAL) << "Unimplemented";
637    }
638  } else {
639    DCHECK(destination.IsDoubleStackSlot()) << destination;
640    if (source.IsRegisterPair()) {
641      __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegisterPairLow<Register>());
642      __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
643              source.AsRegisterPairHigh<Register>());
644    } else if (source.IsQuickParameter()) {
645      InvokeDexCallingConvention calling_convention;
646      uint16_t register_index = source.GetQuickParameterRegisterIndex();
647      uint16_t stack_index = source.GetQuickParameterStackIndex();
648      __ movl(Address(ESP, destination.GetStackIndex()),
649              calling_convention.GetRegisterAt(register_index));
650      DCHECK_EQ(calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize(),
651                static_cast<size_t>(destination.GetHighStackIndex(kX86WordSize)));
652    } else if (source.IsFpuRegister()) {
653      __ movsd(Address(ESP, destination.GetStackIndex()), source.As<XmmRegister>());
654    } else {
655      DCHECK(source.IsDoubleStackSlot());
656      __ pushl(Address(ESP, source.GetStackIndex()));
657      __ popl(Address(ESP, destination.GetStackIndex()));
658      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
659      __ popl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)));
660    }
661  }
662}
663
664void CodeGeneratorX86::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
665  LocationSummary* locations = instruction->GetLocations();
666  if (locations != nullptr && locations->Out().Equals(location)) {
667    return;
668  }
669
670  if (locations != nullptr && locations->Out().IsConstant()) {
671    HConstant* const_to_move = locations->Out().GetConstant();
672    if (const_to_move->IsIntConstant()) {
673      Immediate imm(const_to_move->AsIntConstant()->GetValue());
674      if (location.IsRegister()) {
675        __ movl(location.As<Register>(), imm);
676      } else if (location.IsStackSlot()) {
677        __ movl(Address(ESP, location.GetStackIndex()), imm);
678      } else {
679        DCHECK(location.IsConstant());
680        DCHECK_EQ(location.GetConstant(), const_to_move);
681      }
682    } else if (const_to_move->IsLongConstant()) {
683      int64_t value = const_to_move->AsLongConstant()->GetValue();
684      if (location.IsRegisterPair()) {
685        __ movl(location.AsRegisterPairLow<Register>(), Immediate(Low32Bits(value)));
686        __ movl(location.AsRegisterPairHigh<Register>(), Immediate(High32Bits(value)));
687      } else if (location.IsDoubleStackSlot()) {
688        __ movl(Address(ESP, location.GetStackIndex()), Immediate(Low32Bits(value)));
689        __ movl(Address(ESP, location.GetHighStackIndex(kX86WordSize)), Immediate(High32Bits(value)));
690      } else {
691        DCHECK(location.IsConstant());
692        DCHECK_EQ(location.GetConstant(), instruction);
693      }
694    }
695  } else if (instruction->IsTemporary()) {
696    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
697    if (temp_location.IsStackSlot()) {
698      Move32(location, temp_location);
699    } else {
700      DCHECK(temp_location.IsDoubleStackSlot());
701      Move64(location, temp_location);
702    }
703  } else if (instruction->IsLoadLocal()) {
704    int slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
705    switch (instruction->GetType()) {
706      case Primitive::kPrimBoolean:
707      case Primitive::kPrimByte:
708      case Primitive::kPrimChar:
709      case Primitive::kPrimShort:
710      case Primitive::kPrimInt:
711      case Primitive::kPrimNot:
712      case Primitive::kPrimFloat:
713        Move32(location, Location::StackSlot(slot));
714        break;
715
716      case Primitive::kPrimLong:
717      case Primitive::kPrimDouble:
718        Move64(location, Location::DoubleStackSlot(slot));
719        break;
720
721      default:
722        LOG(FATAL) << "Unimplemented local type " << instruction->GetType();
723    }
724  } else {
725    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
726    switch (instruction->GetType()) {
727      case Primitive::kPrimBoolean:
728      case Primitive::kPrimByte:
729      case Primitive::kPrimChar:
730      case Primitive::kPrimShort:
731      case Primitive::kPrimInt:
732      case Primitive::kPrimNot:
733      case Primitive::kPrimFloat:
734        Move32(location, locations->Out());
735        break;
736
737      case Primitive::kPrimLong:
738      case Primitive::kPrimDouble:
739        Move64(location, locations->Out());
740        break;
741
742      default:
743        LOG(FATAL) << "Unexpected type " << instruction->GetType();
744    }
745  }
746}
747
// A goto consumes no inputs and produces no output: no LocationSummary.
void LocationsBuilderX86::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
751
// Emits the branch for an unconditional goto, folding in suspend checks on
// loop back edges and after the entry block, and eliding the jump when the
// successor is the next block in emission order.
void InstructionCodeGeneratorX86::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
    // Back edge with a suspend check: the suspend-check slow path jumps to
    // `successor` itself, so no explicit jmp is emitted here.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  // A suspend check ending the entry block is emitted with this goto.
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
773
// An exit consumes no inputs and produces no output: no LocationSummary.
void LocationsBuilderX86::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
777
778void InstructionCodeGeneratorX86::VisitExit(HExit* exit) {
779  UNUSED(exit);
780  if (kIsDebugBuild) {
781    __ Comment("Unreachable");
782    __ int3();
783  }
784}
785
// An if needs an input location only when its condition is materialized
// (or is not an HCondition at all); otherwise the code generator consumes
// the condition's eflags directly.
void LocationsBuilderX86::VisitIf(HIf* if_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
  HInstruction* cond = if_instr->InputAt(0);
  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::Any());
  }
}
794
795void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
796  HInstruction* cond = if_instr->InputAt(0);
797  if (cond->IsIntConstant()) {
798    // Constant condition, statically compared against 1.
799    int32_t cond_value = cond->AsIntConstant()->GetValue();
800    if (cond_value == 1) {
801      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
802                                     if_instr->IfTrueSuccessor())) {
803        __ jmp(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
804      }
805      return;
806    } else {
807      DCHECK_EQ(cond_value, 0);
808    }
809  } else {
810    bool materialized =
811        !cond->IsCondition() || cond->AsCondition()->NeedsMaterialization();
812    // Moves do not affect the eflags register, so if the condition is
813    // evaluated just before the if, we don't need to evaluate it
814    // again.
815    bool eflags_set = cond->IsCondition()
816        && cond->AsCondition()->IsBeforeWhenDisregardMoves(if_instr);
817    if (materialized) {
818      if (!eflags_set) {
819        // Materialized condition, compare against 0.
820        Location lhs = if_instr->GetLocations()->InAt(0);
821        if (lhs.IsRegister()) {
822          __ cmpl(lhs.As<Register>(), Immediate(0));
823        } else {
824          __ cmpl(Address(ESP, lhs.GetStackIndex()), Immediate(0));
825        }
826        __ j(kNotEqual,  codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
827      } else {
828        __ j(X86Condition(cond->AsCondition()->GetCondition()),
829             codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
830      }
831    } else {
832      Location lhs = cond->GetLocations()->InAt(0);
833      Location rhs = cond->GetLocations()->InAt(1);
834      // LHS is guaranteed to be in a register (see
835      // LocationsBuilderX86::VisitCondition).
836      if (rhs.IsRegister()) {
837        __ cmpl(lhs.As<Register>(), rhs.As<Register>());
838      } else if (rhs.IsConstant()) {
839        HIntConstant* instruction = rhs.GetConstant()->AsIntConstant();
840        Immediate imm(instruction->AsIntConstant()->GetValue());
841        __ cmpl(lhs.As<Register>(), imm);
842      } else {
843        __ cmpl(lhs.As<Register>(), Address(ESP, rhs.GetStackIndex()));
844      }
845      __ j(X86Condition(cond->AsCondition()->GetCondition()),
846           codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
847    }
848  }
849  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
850                                 if_instr->IfFalseSuccessor())) {
851    __ jmp(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
852  }
853}
854
void LocationsBuilderX86::VisitLocal(HLocal* local) {
  // Locals emit no code and need no locations; they only name a stack slot.
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitLocal(HLocal* local) {
  // Locals are only declared in the entry block; nothing to generate.
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderX86::VisitLoadLocal(HLoadLocal* local) {
  // No locations needed; the value is handled at its use site.
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}
871
872void LocationsBuilderX86::VisitStoreLocal(HStoreLocal* store) {
873  LocationSummary* locations =
874      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
875  switch (store->InputAt(1)->GetType()) {
876    case Primitive::kPrimBoolean:
877    case Primitive::kPrimByte:
878    case Primitive::kPrimChar:
879    case Primitive::kPrimShort:
880    case Primitive::kPrimInt:
881    case Primitive::kPrimNot:
882    case Primitive::kPrimFloat:
883      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
884      break;
885
886    case Primitive::kPrimLong:
887    case Primitive::kPrimDouble:
888      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
889      break;
890
891    default:
892      LOG(FATAL) << "Unknown local type " << store->InputAt(1)->GetType();
893  }
894  store->SetLocations(locations);
895}
896
void InstructionCodeGeneratorX86::VisitStoreLocal(HStoreLocal* store) {
  // Nothing to emit: the input location constraint (the local's stack slot,
  // set up in the locations builder) makes the value land there.
  UNUSED(store);
}
900
901void LocationsBuilderX86::VisitCondition(HCondition* comp) {
902  LocationSummary* locations =
903      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
904  locations->SetInAt(0, Location::RequiresRegister());
905  locations->SetInAt(1, Location::Any());
906  if (comp->NeedsMaterialization()) {
907    locations->SetOut(Location::RequiresRegister());
908  }
909}
910
void InstructionCodeGeneratorX86::VisitCondition(HCondition* comp) {
  // When materialized, compute the boolean result into the output register
  // with an xor/cmp/setcc sequence. Non-materialized conditions emit
  // nothing here; VisitIf consumes them directly through eflags.
  if (comp->NeedsMaterialization()) {
    LocationSummary* locations = comp->GetLocations();
    Register reg = locations->Out().As<Register>();
    // Clear register: setcc only sets the low byte.
    // The xor must precede the cmp because it clobbers eflags.
    __ xorl(reg, reg);
    if (locations->InAt(1).IsRegister()) {
      __ cmpl(locations->InAt(0).As<Register>(),
              locations->InAt(1).As<Register>());
    } else if (locations->InAt(1).IsConstant()) {
      HConstant* instruction = locations->InAt(1).GetConstant();
      Immediate imm(instruction->AsIntConstant()->GetValue());
      __ cmpl(locations->InAt(0).As<Register>(), imm);
    } else {
      // Right-hand side on the stack: compare against its slot.
      __ cmpl(locations->InAt(0).As<Register>(),
              Address(ESP, locations->InAt(1).GetStackIndex()));
    }
    __ setb(X86Condition(comp->GetCondition()), reg);
  }
}
931
// The six concrete comparison nodes (==, !=, <, <=, >, >=) all delegate to
// the generic HCondition handling in VisitCondition.
void LocationsBuilderX86::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
979
// Constants are not generated at their definition: each use site emits the
// value directly, so only a constant location is recorded here.
void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderX86::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderX86::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1023
void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
  // No value to return, so no locations are needed.
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret) {
  UNUSED(ret);
  // Tear down the frame and return to the caller.
  codegen_->GenerateFrameExit();
  __ ret();
}
1033
1034void LocationsBuilderX86::VisitReturn(HReturn* ret) {
1035  LocationSummary* locations =
1036      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
1037  switch (ret->InputAt(0)->GetType()) {
1038    case Primitive::kPrimBoolean:
1039    case Primitive::kPrimByte:
1040    case Primitive::kPrimChar:
1041    case Primitive::kPrimShort:
1042    case Primitive::kPrimInt:
1043    case Primitive::kPrimNot:
1044      locations->SetInAt(0, Location::RegisterLocation(EAX));
1045      break;
1046
1047    case Primitive::kPrimLong:
1048      locations->SetInAt(
1049          0, Location::RegisterPairLocation(EAX, EDX));
1050      break;
1051
1052    case Primitive::kPrimFloat:
1053    case Primitive::kPrimDouble:
1054      locations->SetInAt(
1055          0, Location::FpuRegisterLocation(XMM0));
1056      break;
1057
1058    default:
1059      LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
1060  }
1061}
1062
void InstructionCodeGeneratorX86::VisitReturn(HReturn* ret) {
  // Debug-only checks that the register allocator honored the ABI return
  // locations requested in LocationsBuilderX86::VisitReturn, then emits
  // the frame teardown and the ret.
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        DCHECK_EQ(ret->GetLocations()->InAt(0).As<Register>(), EAX);
        break;

      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairLow<Register>(), EAX);
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairHigh<Register>(), EDX);
        break;

      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        DCHECK_EQ(ret->GetLocations()->InAt(0).As<XmmRegister>(), XMM0);
        break;

      default:
        LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
  __ ret();
}
1092
void LocationsBuilderX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  // Static calls use the shared invoke location setup.
  HandleInvoke(invoke);
}
1096
void InstructionCodeGeneratorX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  // Loads the resolved ArtMethod out of the current method's dex cache and
  // calls its quick-compiled entry point.
  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  codegen_->LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ movl(temp, Address(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()));
  // temp = temp[index_in_cache]
  __ movl(temp, Address(temp, CodeGenerator::GetCacheOffset(invoke->GetIndexInDexCache())));
  // (temp + offset_of_quick_compiled_code)()
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value()));

  DCHECK(!codegen_->IsLeafMethod());
  // Record the safepoint so the runtime can map this PC back to dex.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1119
void LocationsBuilderX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  // Virtual calls use the shared invoke location setup.
  HandleInvoke(invoke);
}
1123
1124void LocationsBuilderX86::HandleInvoke(HInvoke* invoke) {
1125  LocationSummary* locations =
1126      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1127  locations->AddTemp(Location::RegisterLocation(EAX));
1128
1129  InvokeDexCallingConventionVisitor calling_convention_visitor;
1130  for (size_t i = 0; i < invoke->InputCount(); i++) {
1131    HInstruction* input = invoke->InputAt(i);
1132    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1133  }
1134
1135  switch (invoke->GetType()) {
1136    case Primitive::kPrimBoolean:
1137    case Primitive::kPrimByte:
1138    case Primitive::kPrimChar:
1139    case Primitive::kPrimShort:
1140    case Primitive::kPrimInt:
1141    case Primitive::kPrimNot:
1142      locations->SetOut(Location::RegisterLocation(EAX));
1143      break;
1144
1145    case Primitive::kPrimLong:
1146      locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
1147      break;
1148
1149    case Primitive::kPrimVoid:
1150      break;
1151
1152    case Primitive::kPrimDouble:
1153    case Primitive::kPrimFloat:
1154      locations->SetOut(Location::FpuRegisterLocation(XMM0));
1155      break;
1156  }
1157
1158  invoke->SetLocations(locations);
1159}
1160
void InstructionCodeGeneratorX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  // Dispatches through the receiver class's embedded vtable:
  //   temp = receiver->klass_; temp = temp->vtable_[index]; call entry point.
  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    // Receiver was spilled; reload it before dereferencing.
    __ movl(temp, Address(ESP, receiver.GetStackIndex()));
    __ movl(temp, Address(temp, class_offset));
  } else {
    __ movl(temp, Address(receiver.As<Register>(), class_offset));
  }
  // temp = temp->GetMethodAt(method_offset);
  __ movl(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value()));

  DCHECK(!codegen_->IsLeafMethod());
  // Record the safepoint so the runtime can map this PC back to dex.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1183
void LocationsBuilderX86::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument.
  // The hidden argument (the callee's dex method index, see the code
  // generator below) is kept in an XMM temp, presumably so it does not
  // consume a core argument register — NOTE(review): XMM0 is also the FP
  // return register; confirm this temp cannot clash with it.
  invoke->GetLocations()->AddTemp(Location::FpuRegisterLocation(XMM0));
}
1189
void InstructionCodeGeneratorX86::VisitInvokeInterface(HInvokeInterface* invoke) {
  // Dispatches through the receiver class's embedded IMT, passing the dex
  // method index as a hidden argument in an XMM temp register.
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Set the hidden argument.
  __ movl(temp, Immediate(invoke->GetDexMethodIndex()));
  // movd copies the 32-bit index into the XMM temp.
  __ movd(invoke->GetLocations()->GetTemp(1).As<XmmRegister>(), temp);

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    // Receiver was spilled; reload it before dereferencing.
    __ movl(temp, Address(ESP, receiver.GetStackIndex()));
    __ movl(temp, Address(temp, class_offset));
  } else {
    __ movl(temp, Address(receiver.As<Register>(), class_offset));
  }
  // temp = temp->GetImtEntryAt(method_offset);
  __ movl(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value()));

  DCHECK(!codegen_->IsLeafMethod());
  // Record the safepoint so the runtime can map this PC back to dex.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1218
1219void LocationsBuilderX86::VisitNeg(HNeg* neg) {
1220  LocationSummary* locations =
1221      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
1222  switch (neg->GetResultType()) {
1223    case Primitive::kPrimInt:
1224    case Primitive::kPrimLong:
1225      locations->SetInAt(0, Location::RequiresRegister());
1226      locations->SetOut(Location::SameAsFirstInput());
1227      break;
1228
1229    case Primitive::kPrimFloat:
1230    case Primitive::kPrimDouble:
1231      locations->SetInAt(0, Location::RequiresFpuRegister());
1232      // Output overlaps as we need a fresh (zero-initialized)
1233      // register to perform subtraction from zero.
1234      locations->SetOut(Location::RequiresFpuRegister());
1235      break;
1236
1237    default:
1238      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1239  }
1240}
1241
void InstructionCodeGeneratorX86::VisitNeg(HNeg* neg) {
  // Emits arithmetic negation. Integer forms negate in place; FP forms
  // compute (0 - in) into a separate, zeroed output register.
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negl(out.As<Register>());
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegisterPair());
      DCHECK(in.Equals(out));
      __ negl(out.AsRegisterPairLow<Register>());
      // Negation is similar to subtraction from zero.  The low 32-bit
      // word triggers a borrow when it is different from zero; to take
      // it into account, add 1 to the high word if the carry flag (CF)
      // is set to 1 after the first NEGL operation.
      __ adcl(out.AsRegisterPairHigh<Register>(), Immediate(0));
      __ negl(out.AsRegisterPairHigh<Register>());
      break;

    case Primitive::kPrimFloat:
      DCHECK(!in.Equals(out));
      // out = 0
      __ xorps(out.As<XmmRegister>(), out.As<XmmRegister>());
      // out = out - in
      __ subss(out.As<XmmRegister>(), in.As<XmmRegister>());
      break;

    case Primitive::kPrimDouble:
      DCHECK(!in.Equals(out));
      // out = 0
      __ xorpd(out.As<XmmRegister>(), out.As<XmmRegister>());
      // out = out - in
      __ subsd(out.As<XmmRegister>(), in.As<XmmRegister>());
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1286
void LocationsBuilderX86::VisitTypeConversion(HTypeConversion* conversion) {
  // Sets up locations for the supported primitive conversions
  // (int-to-byte, long-to-int, int-to-long, int-to-char); conversions
  // involving floating point are not implemented yet.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat:
        case Primitive::kPrimDouble:
          LOG(FATAL) << "Type conversion from " << input_type
                     << " to " << result_type << " not yet implemented";
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          // Input pinned to EAX and output to EAX:EDX, as required by the
          // cdq instruction used by the code generator.
          locations->SetInAt(0, Location::RegisterLocation(EAX));
          locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
          break;

        case Primitive::kPrimFloat:
        case Primitive::kPrimDouble:
          LOG(FATAL) << "Type conversion from " << input_type << " to "
                     << result_type << " not yet implemented";
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-char' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Type conversion from " << input_type
                 << " to " << result_type << " not yet implemented";
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1380
void InstructionCodeGeneratorX86::VisitTypeConversion(HTypeConversion* conversion) {
  // Emits code for the supported primitive conversions; FP conversions are
  // not implemented yet (see the matching locations builder).
  LocationSummary* locations = conversion->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          if (in.IsRegister()) {
            // NOTE(review): movsxb needs a byte-addressable register
            // (EAX/EBX/ECX/EDX); this assumes the allocator never picks
            // ESI/EDI/EBP for the input here — TODO confirm.
            __ movsxb(out.As<Register>(), in.As<ByteRegister>());
          } else if (in.IsStackSlot()) {
            __ movsxb(out.As<Register>(), Address(ESP, in.GetStackIndex()));
          } else {
            // Constant input: fold the truncation at compile time.
            DCHECK(in.GetConstant()->IsIntConstant());
            int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
            __ movl(out.As<Register>(), Immediate(static_cast<int8_t>(value)));
          }
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          // Only the low 32 bits of the long are kept.
          if (in.IsRegisterPair()) {
            __ movl(out.As<Register>(), in.AsRegisterPairLow<Register>());
          } else if (in.IsDoubleStackSlot()) {
            __ movl(out.As<Register>(), Address(ESP, in.GetStackIndex()));
          } else {
            // Constant input: fold the truncation at compile time.
            DCHECK(in.IsConstant());
            DCHECK(in.GetConstant()->IsLongConstant());
            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
            __ movl(out.As<Register>(), Immediate(static_cast<int32_t>(value)));
          }
          break;

        case Primitive::kPrimFloat:
        case Primitive::kPrimDouble:
          LOG(FATAL) << "Type conversion from " << input_type
                     << " to " << result_type << " not yet implemented";
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          // cdq sign-extends EAX into EDX, hence the pinned locations.
          DCHECK_EQ(out.AsRegisterPairLow<Register>(), EAX);
          DCHECK_EQ(out.AsRegisterPairHigh<Register>(), EDX);
          DCHECK_EQ(in.As<Register>(), EAX);
          __ cdq();
          break;

        case Primitive::kPrimFloat:
        case Primitive::kPrimDouble:
          LOG(FATAL) << "Type conversion from " << input_type << " to "
                     << result_type << " not yet implemented";
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-char' instruction.
          if (in.IsRegister()) {
            __ movzxw(out.As<Register>(), in.As<Register>());
          } else if (in.IsStackSlot()) {
            __ movzxw(out.As<Register>(), Address(ESP, in.GetStackIndex()));
          } else {
            // Constant input: fold the zero-extension at compile time.
            DCHECK(in.GetConstant()->IsIntConstant());
            int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
            __ movl(out.As<Register>(), Immediate(static_cast<uint16_t>(value)));
          }
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Type conversion from " << input_type
                 << " to " << result_type << " not yet implemented";
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1499
1500void LocationsBuilderX86::VisitAdd(HAdd* add) {
1501  LocationSummary* locations =
1502      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1503  switch (add->GetResultType()) {
1504    case Primitive::kPrimInt:
1505    case Primitive::kPrimLong: {
1506      locations->SetInAt(0, Location::RequiresRegister());
1507      locations->SetInAt(1, Location::Any());
1508      locations->SetOut(Location::SameAsFirstInput());
1509      break;
1510    }
1511
1512    case Primitive::kPrimFloat:
1513    case Primitive::kPrimDouble: {
1514      locations->SetInAt(0, Location::RequiresFpuRegister());
1515      locations->SetInAt(1, Location::Any());
1516      locations->SetOut(Location::SameAsFirstInput());
1517      break;
1518    }
1519
1520    default:
1521      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1522      break;
1523  }
1524}
1525
void InstructionCodeGeneratorX86::VisitAdd(HAdd* add) {
  // Emits an in-place addition into the first operand's location.
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      // Second operand may be a register, an immediate, or a stack slot.
      if (second.IsRegister()) {
        __ addl(first.As<Register>(), second.As<Register>());
      } else if (second.IsConstant()) {
        __ addl(first.As<Register>(), Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ addl(first.As<Register>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit add: add the low words, then add-with-carry the high words.
      if (second.IsRegisterPair()) {
        __ addl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
        __ adcl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
      } else {
        __ addl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
        __ adcl(first.AsRegisterPairHigh<Register>(),
                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ addss(first.As<XmmRegister>(), second.As<XmmRegister>());
      } else {
        __ addss(first.As<XmmRegister>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ addsd(first.As<XmmRegister>(), second.As<XmmRegister>());
      } else {
        __ addsd(first.As<XmmRegister>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
1577
1578void LocationsBuilderX86::VisitSub(HSub* sub) {
1579  LocationSummary* locations =
1580      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1581  switch (sub->GetResultType()) {
1582    case Primitive::kPrimInt:
1583    case Primitive::kPrimLong: {
1584      locations->SetInAt(0, Location::RequiresRegister());
1585      locations->SetInAt(1, Location::Any());
1586      locations->SetOut(Location::SameAsFirstInput());
1587      break;
1588    }
1589    case Primitive::kPrimFloat:
1590    case Primitive::kPrimDouble: {
1591      locations->SetInAt(0, Location::RequiresFpuRegister());
1592      locations->SetInAt(1, Location::RequiresFpuRegister());
1593      locations->SetOut(Location::SameAsFirstInput());
1594      break;
1595    }
1596
1597    default:
1598      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1599  }
1600}
1601
void InstructionCodeGeneratorX86::VisitSub(HSub* sub) {
  // Emits an in-place subtraction into the first operand's location.
  LocationSummary* locations = sub->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      // Second operand may be a register, an immediate, or a stack slot.
      if (second.IsRegister()) {
        __ subl(first.As<Register>(), second.As<Register>());
      } else if (second.IsConstant()) {
        __ subl(first.As<Register>(), Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ subl(first.As<Register>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit sub: subtract the low words, then subtract-with-borrow the
      // high words.
      if (second.IsRegisterPair()) {
        __ subl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
        __ sbbl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
      } else {
        __ subl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
        __ sbbl(first.AsRegisterPairHigh<Register>(),
                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // FP second operand is constrained to a register (see the builder).
      __ subss(first.As<XmmRegister>(), second.As<XmmRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ subsd(first.As<XmmRegister>(), second.As<XmmRegister>());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
1645
1646void LocationsBuilderX86::VisitMul(HMul* mul) {
1647  LocationSummary* locations =
1648      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1649  switch (mul->GetResultType()) {
1650    case Primitive::kPrimInt:
1651      locations->SetInAt(0, Location::RequiresRegister());
1652      locations->SetInAt(1, Location::Any());
1653      locations->SetOut(Location::SameAsFirstInput());
1654      break;
1655    case Primitive::kPrimLong: {
1656      locations->SetInAt(0, Location::RequiresRegister());
1657      // TODO: Currently this handles only stack operands:
1658      // - we don't have enough registers because we currently use Quick ABI.
1659      // - by the time we have a working register allocator we will probably change the ABI
1660      // and fix the above.
1661      // - we don't have a way yet to request operands on stack but the base line compiler
1662      // will leave the operands on the stack with Any().
1663      locations->SetInAt(1, Location::Any());
1664      locations->SetOut(Location::SameAsFirstInput());
1665      // Needed for imul on 32bits with 64bits output.
1666      locations->AddTemp(Location::RegisterLocation(EAX));
1667      locations->AddTemp(Location::RegisterLocation(EDX));
1668      break;
1669    }
1670    case Primitive::kPrimFloat:
1671    case Primitive::kPrimDouble: {
1672      locations->SetInAt(0, Location::RequiresFpuRegister());
1673      locations->SetInAt(1, Location::RequiresFpuRegister());
1674      locations->SetOut(Location::SameAsFirstInput());
1675      break;
1676    }
1677
1678    default:
1679      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
1680  }
1681}
1682
// Emits code for an HMul. The destination is the first operand's location
// (SameAsFirstInput). The 64-bit case performs schoolbook multiplication of
// two register/stack 64-bit values using 32-bit imull/mull.
void InstructionCodeGeneratorX86::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      // Second operand may be a register, an immediate, or a stack slot.
      if (second.IsRegister()) {
        __ imull(first.As<Register>(), second.As<Register>());
      } else if (second.IsConstant()) {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
        __ imull(first.As<Register>(), imm);
      } else {
        DCHECK(second.IsStackSlot());
        __ imull(first.As<Register>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      // The locations builder only supports a stack operand for now
      // (see the TODO there).
      DCHECK(second.IsDoubleStackSlot());

      Register in1_hi = first.AsRegisterPairHigh<Register>();
      Register in1_lo = first.AsRegisterPairLow<Register>();
      Address in2_hi(ESP, second.GetHighStackIndex(kX86WordSize));
      Address in2_lo(ESP, second.GetStackIndex());
      Register eax = locations->GetTemp(0).As<Register>();
      Register edx = locations->GetTemp(1).As<Register>();

      // The temps must be EAX/EDX because mull implicitly writes EDX:EAX.
      DCHECK_EQ(EAX, eax);
      DCHECK_EQ(EDX, edx);

      // input: in1 - 64 bits, in2 - 64 bits
      // output: in1
      // formula: in1.hi : in1.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
      // parts: in1.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: in1.lo = (in1.lo * in2.lo)[31:0]

      __ movl(eax, in2_hi);
      // eax <- in1.lo * in2.hi
      __ imull(eax, in1_lo);
      // in1.hi <- in1.hi * in2.lo
      __ imull(in1_hi, in2_lo);
      // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ addl(in1_hi, eax);
      // move in1_lo to eax to prepare for double precision
      __ movl(eax, in1_lo);
      // edx:eax <- in1.lo * in2.lo (unsigned widening multiply)
      __ mull(in2_lo);
      // in1.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ addl(in1_hi, edx);
      // in1.lo <- (in1.lo * in2.lo)[31:0];
      __ movl(in1_lo, eax);

      break;
    }

    case Primitive::kPrimFloat: {
      __ mulss(first.As<XmmRegister>(), second.As<XmmRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ mulsd(first.As<XmmRegister>(), second.As<XmmRegister>());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
1755
1756void LocationsBuilderX86::VisitDiv(HDiv* div) {
1757  LocationSummary::CallKind call_kind = div->GetResultType() == Primitive::kPrimLong
1758      ? LocationSummary::kCall
1759      : LocationSummary::kNoCall;
1760  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
1761
1762  switch (div->GetResultType()) {
1763    case Primitive::kPrimInt: {
1764      locations->SetInAt(0, Location::RegisterLocation(EAX));
1765      locations->SetInAt(1, Location::RequiresRegister());
1766      locations->SetOut(Location::SameAsFirstInput());
1767      // Intel uses edx:eax as the dividend.
1768      locations->AddTemp(Location::RegisterLocation(EDX));
1769      break;
1770    }
1771    case Primitive::kPrimLong: {
1772      InvokeRuntimeCallingConvention calling_convention;
1773      locations->SetInAt(0, Location::RegisterPairLocation(
1774          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
1775      locations->SetInAt(1, Location::RegisterPairLocation(
1776          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
1777      // Runtime helper puts the result in EAX, EDX.
1778      locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
1779      break;
1780    }
1781    case Primitive::kPrimFloat:
1782    case Primitive::kPrimDouble: {
1783      locations->SetInAt(0, Location::RequiresFpuRegister());
1784      locations->SetInAt(1, Location::RequiresFpuRegister());
1785      locations->SetOut(Location::SameAsFirstInput());
1786      break;
1787    }
1788
1789    default:
1790      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1791  }
1792}
1793
// Emits code for an HDiv. Integer division uses cdq + idivl with a slow path
// for the INT_MIN / -1 overflow case; long division calls the pLdiv runtime
// entrypoint; FP division is a single SSE instruction.
void InstructionCodeGeneratorX86::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  switch (div->GetResultType()) {
    case Primitive::kPrimInt: {
      DCHECK(first.Equals(out));
      Register first_reg = first.As<Register>();
      Register second_reg = second.As<Register>();
      // The locations builder pinned the dividend to EAX and reserved EDX,
      // matching idivl's implicit EDX:EAX operand.
      DCHECK_EQ(EAX, first_reg);
      DCHECK_EQ(EDX, locations->GetTemp(0).As<Register>());

      SlowPathCodeX86* slow_path =
          new (GetGraph()->GetArena()) DivMinusOneSlowPathX86(first_reg);
      codegen_->AddSlowPath(slow_path);

      // 0x80000000/-1 triggers an arithmetic exception!
      // Dividing by -1 is actually negation and -0x80000000 == 0x80000000 so
      // it's safe to just use negl instead of more complex comparisons.

      __ cmpl(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());

      // edx:eax <- sign-extended of eax
      __ cdq();
      // eax = quotient, edx = remainder
      __ idivl(second_reg);

      __ Bind(slow_path->GetExitLabel());
      break;
    }

    case Primitive::kPrimLong: {
      // Check that the register allocator honored the runtime calling
      // convention requested by the locations builder.
      InvokeRuntimeCallingConvention calling_convention;
      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
      DCHECK_EQ(EAX, out.AsRegisterPairLow<Register>());
      DCHECK_EQ(EDX, out.AsRegisterPairHigh<Register>());

      // Call the 64-bit division runtime helper through the thread register.
      __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pLdiv)));
      codegen_->RecordPcInfo(div, div->GetDexPc());

      break;
    }

    case Primitive::kPrimFloat: {
      DCHECK(first.Equals(out));
      __ divss(first.As<XmmRegister>(), second.As<XmmRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      DCHECK(first.Equals(out));
      __ divsd(first.As<XmmRegister>(), second.As<XmmRegister>());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}
1859
1860void LocationsBuilderX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1861  LocationSummary* locations =
1862      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1863  switch (instruction->GetType()) {
1864    case Primitive::kPrimInt: {
1865      locations->SetInAt(0, Location::Any());
1866      break;
1867    }
1868    case Primitive::kPrimLong: {
1869      locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
1870      if (!instruction->IsConstant()) {
1871        locations->AddTemp(Location::RequiresRegister());
1872      }
1873      break;
1874    }
1875    default:
1876      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
1877  }
1878  if (instruction->HasUses()) {
1879    locations->SetOut(Location::SameAsFirstInput());
1880  }
1881}
1882
1883void InstructionCodeGeneratorX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
1884  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86(instruction);
1885  codegen_->AddSlowPath(slow_path);
1886
1887  LocationSummary* locations = instruction->GetLocations();
1888  Location value = locations->InAt(0);
1889
1890  switch (instruction->GetType()) {
1891    case Primitive::kPrimInt: {
1892      if (value.IsRegister()) {
1893        __ testl(value.As<Register>(), value.As<Register>());
1894        __ j(kEqual, slow_path->GetEntryLabel());
1895      } else if (value.IsStackSlot()) {
1896        __ cmpl(Address(ESP, value.GetStackIndex()), Immediate(0));
1897        __ j(kEqual, slow_path->GetEntryLabel());
1898      } else {
1899        DCHECK(value.IsConstant()) << value;
1900        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
1901        __ jmp(slow_path->GetEntryLabel());
1902        }
1903      }
1904      break;
1905    }
1906    case Primitive::kPrimLong: {
1907      if (value.IsRegisterPair()) {
1908        Register temp = locations->GetTemp(0).As<Register>();
1909        __ movl(temp, value.AsRegisterPairLow<Register>());
1910        __ orl(temp, value.AsRegisterPairHigh<Register>());
1911        __ j(kEqual, slow_path->GetEntryLabel());
1912      } else {
1913        DCHECK(value.IsConstant()) << value;
1914        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
1915          __ jmp(slow_path->GetEntryLabel());
1916        }
1917      }
1918      break;
1919    }
1920    default:
1921      LOG(FATAL) << "Unexpected type for HDivZeroCheck" << instruction->GetType();
1922  }
1923}
1924
1925void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
1926  LocationSummary* locations =
1927      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1928  locations->SetOut(Location::RegisterLocation(EAX));
1929  InvokeRuntimeCallingConvention calling_convention;
1930  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1931  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1932}
1933
// Emits the runtime call that allocates a new object: loads the current
// method and the type index into the runtime argument registers, then calls
// pAllocObjectWithAccessCheck through the thread register. The result is
// returned in EAX (see the locations builder).
void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));

  __ fs()->call(
      Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocObjectWithAccessCheck)));

  // Record a stack map so the runtime can unwind / GC at this call site.
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}
1945
1946void LocationsBuilderX86::VisitNewArray(HNewArray* instruction) {
1947  LocationSummary* locations =
1948      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1949  locations->SetOut(Location::RegisterLocation(EAX));
1950  InvokeRuntimeCallingConvention calling_convention;
1951  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1952  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1953  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1954}
1955
// Emits the runtime call that allocates a new array. The array length was
// already placed in the third argument register by the register allocator
// (see the locations builder); only the type index and current method are
// materialized here.
void InstructionCodeGeneratorX86::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));

  __ fs()->call(
      Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocArrayWithAccessCheck)));

  // Record a stack map so the runtime can unwind / GC at this call site.
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}
1967
1968void LocationsBuilderX86::VisitParameterValue(HParameterValue* instruction) {
1969  LocationSummary* locations =
1970      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1971  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
1972  if (location.IsStackSlot()) {
1973    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1974  } else if (location.IsDoubleStackSlot()) {
1975    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1976  }
1977  locations->SetOut(location);
1978}
1979
void InstructionCodeGeneratorX86::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to emit: the parameter already lives at the location recorded by
  // the locations builder.
  UNUSED(instruction);
}
1983
1984void LocationsBuilderX86::VisitNot(HNot* not_) {
1985  LocationSummary* locations =
1986      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
1987  locations->SetInAt(0, Location::RequiresRegister());
1988  locations->SetOut(Location::SameAsFirstInput());
1989}
1990
// Emits code for an HNot, operating in place on the input register(s).
// Boolean not is xor-with-1; integer not is a bitwise complement.
void InstructionCodeGeneratorX86::VisitNot(HNot* not_) {
  LocationSummary* locations = not_->GetLocations();
  Location in = locations->InAt(0);
  Location out = locations->Out();
  DCHECK(in.Equals(out));
  switch (not_->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
      // Booleans are 0/1, so flipping the low bit negates the value.
      __ xorl(out.As<Register>(), Immediate(1));
      break;

    case Primitive::kPrimInt:
      __ notl(out.As<Register>());
      break;

    case Primitive::kPrimLong:
      // Complement both halves of the register pair independently.
      __ notl(out.AsRegisterPairLow<Register>());
      __ notl(out.AsRegisterPairHigh<Register>());
      break;

    default:
      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
  }
}
2014
2015void LocationsBuilderX86::VisitCompare(HCompare* compare) {
2016  LocationSummary* locations =
2017      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2018  locations->SetInAt(0, Location::RequiresRegister());
2019  locations->SetInAt(1, Location::Any());
2020  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2021}
2022
2023void InstructionCodeGeneratorX86::VisitCompare(HCompare* compare) {
2024  LocationSummary* locations = compare->GetLocations();
2025  switch (compare->InputAt(0)->GetType()) {
2026    case Primitive::kPrimLong: {
2027      Label less, greater, done;
2028      Register output = locations->Out().As<Register>();
2029      Location left = locations->InAt(0);
2030      Location right = locations->InAt(1);
2031      if (right.IsRegister()) {
2032        __ cmpl(left.AsRegisterPairHigh<Register>(), right.AsRegisterPairHigh<Register>());
2033      } else {
2034        DCHECK(right.IsDoubleStackSlot());
2035        __ cmpl(left.AsRegisterPairHigh<Register>(),
2036                Address(ESP, right.GetHighStackIndex(kX86WordSize)));
2037      }
2038      __ j(kLess, &less);  // Signed compare.
2039      __ j(kGreater, &greater);  // Signed compare.
2040      if (right.IsRegisterPair()) {
2041        __ cmpl(left.AsRegisterPairLow<Register>(), right.AsRegisterPairLow<Register>());
2042      } else {
2043        DCHECK(right.IsDoubleStackSlot());
2044        __ cmpl(left.AsRegisterPairLow<Register>(), Address(ESP, right.GetStackIndex()));
2045      }
2046      __ movl(output, Immediate(0));
2047      __ j(kEqual, &done);
2048      __ j(kBelow, &less);  // Unsigned compare.
2049
2050      __ Bind(&greater);
2051      __ movl(output, Immediate(1));
2052      __ jmp(&done);
2053
2054      __ Bind(&less);
2055      __ movl(output, Immediate(-1));
2056
2057      __ Bind(&done);
2058      break;
2059    }
2060    default:
2061      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
2062  }
2063}
2064
2065void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
2066  LocationSummary* locations =
2067      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2068  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2069    locations->SetInAt(i, Location::Any());
2070  }
2071  locations->SetOut(Location::Any());
2072}
2073
void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction) {
  // Phis never generate code: they are eliminated by the register allocator
  // (their moves are emitted at predecessor block ends).
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
2078
2079void LocationsBuilderX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
2080  LocationSummary* locations =
2081      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2082  locations->SetInAt(0, Location::RequiresRegister());
2083  Primitive::Type field_type = instruction->GetFieldType();
2084  bool is_object_type = field_type == Primitive::kPrimNot;
2085  bool is_byte_type = (field_type == Primitive::kPrimBoolean)
2086      || (field_type == Primitive::kPrimByte);
2087  // The register allocator does not support multiple
2088  // inputs that die at entry with one in a specific register.
2089  if (is_byte_type) {
2090    // Ensure the value is in a byte register.
2091    locations->SetInAt(1, Location::RegisterLocation(EAX));
2092  } else {
2093    locations->SetInAt(1, Location::RequiresRegister());
2094  }
2095  // Temporary registers for the write barrier.
2096  if (is_object_type) {
2097    locations->AddTemp(Location::RequiresRegister());
2098    // Ensure the card is in a byte register.
2099    locations->AddTemp(Location::RegisterLocation(ECX));
2100  }
2101}
2102
// Emits the store for an HInstanceFieldSet, choosing the mov width from the
// field type. Object stores are followed by a GC card mark.
void InstructionCodeGeneratorX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
  Primitive::Type field_type = instruction->GetFieldType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      // The locations builder pinned the value to a byte-addressable register.
      ByteRegister value = locations->InAt(1).As<ByteRegister>();
      __ movb(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      Register value = locations->InAt(1).As<Register>();
      __ movw(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register value = locations->InAt(1).As<Register>();
      __ movl(Address(obj, offset), value);

      if (field_type == Primitive::kPrimNot) {
        // Reference store: mark the card for the concurrent GC.
        Register temp = locations->GetTemp(0).As<Register>();
        Register card = locations->GetTemp(1).As<Register>();
        codegen_->MarkGCCard(temp, card, obj, value);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // Store the register pair as two 32-bit moves (low word first).
      Location value = locations->InAt(1);
      __ movl(Address(obj, offset), value.AsRegisterPairLow<Register>());
      __ movl(Address(obj, kX86WordSize + offset), value.AsRegisterPairHigh<Register>());
      break;
    }

    case Primitive::kPrimFloat: {
      XmmRegister value = locations->InAt(1).As<XmmRegister>();
      __ movss(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimDouble: {
      XmmRegister value = locations->InAt(1).As<XmmRegister>();
      __ movsd(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }
}
2161
// Write-barrier card mark: after a reference store of `value` into `object`,
// dirty the card covering `object` so the GC rescans it. Null stores are
// skipped. `temp` and `card` are scratch registers; `card` must be
// byte-addressable because the card table is written with movb.
void CodeGeneratorX86::MarkGCCard(Register temp, Register card, Register object, Register value) {
  Label is_null;
  __ testl(value, value);
  __ j(kEqual, &is_null);
  // Load the card table base from the thread (fs-relative on x86).
  __ fs()->movl(card, Address::Absolute(Thread::CardTableOffset<kX86WordSize>().Int32Value()));
  __ movl(temp, object);
  __ shrl(temp, Immediate(gc::accounting::CardTable::kCardShift));
  // card[object >> kCardShift] = card_base_byte; the value stored is the low
  // byte of the card table address itself, which is the "dirty" marker.
  __ movb(Address(temp, card, TIMES_1, 0),
          X86ManagedRegister::FromCpuRegister(card).AsByteRegister());
  __ Bind(&is_null);
}
2173
2174void LocationsBuilderX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
2175  LocationSummary* locations =
2176      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2177  locations->SetInAt(0, Location::RequiresRegister());
2178  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2179}
2180
// Emits the load for an HInstanceFieldGet, choosing the mov instruction
// (width and sign/zero extension) from the field type.
void InstructionCodeGeneratorX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      // Booleans are unsigned bytes: zero-extend.
      Register out = locations->Out().As<Register>();
      __ movzxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimByte: {
      // Bytes are signed: sign-extend.
      Register out = locations->Out().As<Register>();
      __ movsxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimShort: {
      Register out = locations->Out().As<Register>();
      __ movsxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimChar: {
      // Chars are unsigned 16-bit: zero-extend.
      Register out = locations->Out().As<Register>();
      __ movzxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register out = locations->Out().As<Register>();
      __ movl(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimLong: {
      // TODO: support volatile.
      // Two 32-bit loads; not atomic as a 64-bit access.
      __ movl(locations->Out().AsRegisterPairLow<Register>(), Address(obj, offset));
      __ movl(locations->Out().AsRegisterPairHigh<Register>(), Address(obj, kX86WordSize + offset));
      break;
    }

    case Primitive::kPrimFloat: {
      XmmRegister out = locations->Out().As<XmmRegister>();
      __ movss(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimDouble: {
      XmmRegister out = locations->Out().As<XmmRegister>();
      __ movsd(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2242
2243void LocationsBuilderX86::VisitNullCheck(HNullCheck* instruction) {
2244  LocationSummary* locations =
2245      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2246  locations->SetInAt(0, Location::Any());
2247  if (instruction->HasUses()) {
2248    locations->SetOut(Location::SameAsFirstInput());
2249  }
2250}
2251
// Emits code for an HNullCheck: branches to a slow path (which throws NPE)
// when the reference is null. A constant input must be the null constant
// (value 0), which becomes an unconditional jump.
void InstructionCodeGeneratorX86::VisitNullCheck(HNullCheck* instruction) {
  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  if (obj.IsRegister()) {
    __ cmpl(obj.As<Register>(), Immediate(0));
  } else if (obj.IsStackSlot()) {
    __ cmpl(Address(ESP, obj.GetStackIndex()), Immediate(0));
  } else {
    // Only the null constant can reach here, so the check always fails.
    DCHECK(obj.IsConstant()) << obj;
    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
    __ jmp(slow_path->GetEntryLabel());
    return;
  }
  __ j(kEqual, slow_path->GetEntryLabel());
}
2271
2272void LocationsBuilderX86::VisitArrayGet(HArrayGet* instruction) {
2273  LocationSummary* locations =
2274      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2275  locations->SetInAt(0, Location::RequiresRegister());
2276  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2277  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2278}
2279
// Emits the load for an HArrayGet. For each element type, the address is
// either obj + (constant_index << scale) + data_offset when the index is a
// compile-time constant, or a scaled-index addressing mode otherwise. The mov
// variant encodes the element width and sign/zero extension.
void InstructionCodeGeneratorX86::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      // Unsigned byte elements: zero-extend.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        __ movzxb(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
      } else {
        __ movzxb(out, Address(obj, index.As<Register>(), TIMES_1, data_offset));
      }
      break;
    }

    case Primitive::kPrimByte: {
      // Signed byte elements: sign-extend.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        __ movsxb(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
      } else {
        __ movsxb(out, Address(obj, index.As<Register>(), TIMES_1, data_offset));
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        __ movsxw(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
      } else {
        __ movsxw(out, Address(obj, index.As<Register>(), TIMES_2, data_offset));
      }
      break;
    }

    case Primitive::kPrimChar: {
      // Unsigned 16-bit elements: zero-extend.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        __ movzxw(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
      } else {
        __ movzxw(out, Address(obj, index.As<Register>(), TIMES_2, data_offset));
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        __ movl(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
      } else {
        __ movl(out, Address(obj, index.As<Register>(), TIMES_4, data_offset));
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit elements load as two 32-bit moves into a register pair
      // (low word first); not atomic as a 64-bit access.
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ movl(out.AsRegisterPairLow<Register>(), Address(obj, offset));
        __ movl(out.AsRegisterPairHigh<Register>(), Address(obj, offset + kX86WordSize));
      } else {
        __ movl(out.AsRegisterPairLow<Register>(),
                Address(obj, index.As<Register>(), TIMES_8, data_offset));
        __ movl(out.AsRegisterPairHigh<Register>(),
                Address(obj, index.As<Register>(), TIMES_8, data_offset + kX86WordSize));
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
      UNREACHABLE();
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2372
2373void LocationsBuilderX86::VisitArraySet(HArraySet* instruction) {
2374  Primitive::Type value_type = instruction->GetComponentType();
2375  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2376      instruction,
2377      value_type == Primitive::kPrimNot ? LocationSummary::kCall : LocationSummary::kNoCall);
2378
2379  if (value_type == Primitive::kPrimNot) {
2380    InvokeRuntimeCallingConvention calling_convention;
2381    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2382    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2383    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2384  } else {
2385    bool is_byte_type = (value_type == Primitive::kPrimBoolean)
2386        || (value_type == Primitive::kPrimByte);
2387    // We need the inputs to be different than the output in case of long operation.
2388    // In case of a byte operation, the register allocator does not support multiple
2389    // inputs that die at entry with one in a specific register.
2390    locations->SetInAt(0, Location::RequiresRegister());
2391    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2392    if (is_byte_type) {
2393      // Ensure the value is in a byte register.
2394      locations->SetInAt(2, Location::ByteRegisterOrConstant(EAX, instruction->InputAt(2)));
2395    } else {
2396      locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
2397    }
2398  }
2399}
2400
2401void InstructionCodeGeneratorX86::VisitArraySet(HArraySet* instruction) {
2402  LocationSummary* locations = instruction->GetLocations();
2403  Register obj = locations->InAt(0).As<Register>();
2404  Location index = locations->InAt(1);
2405  Location value = locations->InAt(2);
2406  Primitive::Type value_type = instruction->GetComponentType();
2407
2408  switch (value_type) {
2409    case Primitive::kPrimBoolean:
2410    case Primitive::kPrimByte: {
2411      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
2412      if (index.IsConstant()) {
2413        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2414        if (value.IsRegister()) {
2415          __ movb(Address(obj, offset), value.As<ByteRegister>());
2416        } else {
2417          __ movb(Address(obj, offset),
2418                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2419        }
2420      } else {
2421        if (value.IsRegister()) {
2422          __ movb(Address(obj, index.As<Register>(), TIMES_1, data_offset),
2423                  value.As<ByteRegister>());
2424        } else {
2425          __ movb(Address(obj, index.As<Register>(), TIMES_1, data_offset),
2426                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2427        }
2428      }
2429      break;
2430    }
2431
2432    case Primitive::kPrimShort:
2433    case Primitive::kPrimChar: {
2434      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
2435      if (index.IsConstant()) {
2436        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2437        if (value.IsRegister()) {
2438          __ movw(Address(obj, offset), value.As<Register>());
2439        } else {
2440          __ movw(Address(obj, offset),
2441                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2442        }
2443      } else {
2444        if (value.IsRegister()) {
2445          __ movw(Address(obj, index.As<Register>(), TIMES_2, data_offset),
2446                  value.As<Register>());
2447        } else {
2448          __ movw(Address(obj, index.As<Register>(), TIMES_2, data_offset),
2449                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2450        }
2451      }
2452      break;
2453    }
2454
2455    case Primitive::kPrimInt: {
2456      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2457      if (index.IsConstant()) {
2458        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2459        if (value.IsRegister()) {
2460          __ movl(Address(obj, offset), value.As<Register>());
2461        } else {
2462          __ movl(Address(obj, offset), Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2463        }
2464      } else {
2465        if (value.IsRegister()) {
2466          __ movl(Address(obj, index.As<Register>(), TIMES_4, data_offset),
2467                  value.As<Register>());
2468        } else {
2469          __ movl(Address(obj, index.As<Register>(), TIMES_4, data_offset),
2470                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2471        }
2472      }
2473      break;
2474    }
2475
2476    case Primitive::kPrimNot: {
2477      DCHECK(!codegen_->IsLeafMethod());
2478      __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAputObject)));
2479      codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
2480      break;
2481    }
2482
2483    case Primitive::kPrimLong: {
2484      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
2485      if (index.IsConstant()) {
2486        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
2487        if (value.IsRegisterPair()) {
2488          __ movl(Address(obj, offset), value.AsRegisterPairLow<Register>());
2489          __ movl(Address(obj, offset + kX86WordSize), value.AsRegisterPairHigh<Register>());
2490        } else {
2491          DCHECK(value.IsConstant());
2492          int64_t val = value.GetConstant()->AsLongConstant()->GetValue();
2493          __ movl(Address(obj, offset), Immediate(Low32Bits(val)));
2494          __ movl(Address(obj, offset + kX86WordSize), Immediate(High32Bits(val)));
2495        }
2496      } else {
2497        if (value.IsRegisterPair()) {
2498          __ movl(Address(obj, index.As<Register>(), TIMES_8, data_offset),
2499                  value.AsRegisterPairLow<Register>());
2500          __ movl(Address(obj, index.As<Register>(), TIMES_8, data_offset + kX86WordSize),
2501                  value.AsRegisterPairHigh<Register>());
2502        } else {
2503          DCHECK(value.IsConstant());
2504          int64_t val = value.GetConstant()->AsLongConstant()->GetValue();
2505          __ movl(Address(obj, index.As<Register>(), TIMES_8, data_offset),
2506                  Immediate(Low32Bits(val)));
2507          __ movl(Address(obj, index.As<Register>(), TIMES_8, data_offset + kX86WordSize),
2508                  Immediate(High32Bits(val)));
2509        }
2510      }
2511      break;
2512    }
2513
2514    case Primitive::kPrimFloat:
2515    case Primitive::kPrimDouble:
2516      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
2517      UNREACHABLE();
2518    case Primitive::kPrimVoid:
2519      LOG(FATAL) << "Unreachable type " << instruction->GetType();
2520      UNREACHABLE();
2521  }
2522}
2523
2524void LocationsBuilderX86::VisitArrayLength(HArrayLength* instruction) {
2525  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2526  locations->SetInAt(0, Location::RequiresRegister());
2527  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2528  instruction->SetLocations(locations);
2529}
2530
2531void InstructionCodeGeneratorX86::VisitArrayLength(HArrayLength* instruction) {
2532  LocationSummary* locations = instruction->GetLocations();
2533  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
2534  Register obj = locations->InAt(0).As<Register>();
2535  Register out = locations->Out().As<Register>();
2536  __ movl(out, Address(obj, offset));
2537}
2538
2539void LocationsBuilderX86::VisitBoundsCheck(HBoundsCheck* instruction) {
2540  LocationSummary* locations =
2541      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2542  locations->SetInAt(0, Location::RequiresRegister());
2543  locations->SetInAt(1, Location::RequiresRegister());
2544  if (instruction->HasUses()) {
2545    locations->SetOut(Location::SameAsFirstInput());
2546  }
2547}
2548
2549void InstructionCodeGeneratorX86::VisitBoundsCheck(HBoundsCheck* instruction) {
2550  LocationSummary* locations = instruction->GetLocations();
2551  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86(
2552      instruction, locations->InAt(0), locations->InAt(1));
2553  codegen_->AddSlowPath(slow_path);
2554
2555  Register index = locations->InAt(0).As<Register>();
2556  Register length = locations->InAt(1).As<Register>();
2557
2558  __ cmpl(index, length);
2559  __ j(kAboveEqual, slow_path->GetEntryLabel());
2560}
2561
// A temporary carries no location summary: its stack slot is managed
// directly by the code generator.
void LocationsBuilderX86::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}
2565
void InstructionCodeGeneratorX86::VisitTemporary(HTemporary* temp) {
  // Nothing to do: temporaries emit no code; they are driven entirely by
  // the code generator.
  UNUSED(temp);
}
2570
// Parallel moves are inserted after the locations-building phase, so this
// visitor must never be reached.
void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
2575
void InstructionCodeGeneratorX86::VisitParallelMove(HParallelMove* instruction) {
  // Delegate to the parallel move resolver, which orders the individual
  // moves and breaks cycles with swaps.
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
2579
void LocationsBuilderX86::VisitSuspendCheck(HSuspendCheck* instruction) {
  // No inputs or outputs; only a potential runtime call on the slow path.
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}
2583
void InstructionCodeGeneratorX86::VisitSuspendCheck(HSuspendCheck* instruction) {
  // Suspend checks owned by a loop or immediately followed by the entry
  // goto are emitted at the branch site instead of here.
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}
2597
// Emits a suspend check: tests the thread flags word (fs-relative) and runs
// the slow path when any flag is set. With a `successor`, this is a back
// edge: fall into the slow path and let it jump to the successor; without
// one, the slow path returns to straight-line code.
void InstructionCodeGeneratorX86::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathX86* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathX86(instruction, successor);
  codegen_->AddSlowPath(slow_path);
  // 16-bit compare of the thread's flags against zero.
  __ fs()->cmpw(Address::Absolute(
      Thread::ThreadFlagsOffset<kX86WordSize>().Int32Value()), Immediate(0));
  if (successor == nullptr) {
    // Flags set: take the slow path, which returns right here.
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    // No flags set: branch straight to the successor; otherwise fall
    // through into the slow path.
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
2613
// The move resolver emits into the code generator's assembler.
X86Assembler* ParallelMoveResolverX86::GetAssembler() const {
  return codegen_->GetAssembler();
}
2617
2618void ParallelMoveResolverX86::MoveMemoryToMemory(int dst, int src) {
2619  ScratchRegisterScope ensure_scratch(
2620      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
2621  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
2622  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, src + stack_offset));
2623  __ movl(Address(ESP, dst + stack_offset), static_cast<Register>(ensure_scratch.GetRegister()));
2624}
2625
2626void ParallelMoveResolverX86::EmitMove(size_t index) {
2627  MoveOperands* move = moves_.Get(index);
2628  Location source = move->GetSource();
2629  Location destination = move->GetDestination();
2630
2631  if (source.IsRegister()) {
2632    if (destination.IsRegister()) {
2633      __ movl(destination.As<Register>(), source.As<Register>());
2634    } else {
2635      DCHECK(destination.IsStackSlot());
2636      __ movl(Address(ESP, destination.GetStackIndex()), source.As<Register>());
2637    }
2638  } else if (source.IsStackSlot()) {
2639    if (destination.IsRegister()) {
2640      __ movl(destination.As<Register>(), Address(ESP, source.GetStackIndex()));
2641    } else {
2642      DCHECK(destination.IsStackSlot());
2643      MoveMemoryToMemory(destination.GetStackIndex(),
2644                         source.GetStackIndex());
2645    }
2646  } else if (source.IsConstant()) {
2647    HIntConstant* instruction = source.GetConstant()->AsIntConstant();
2648    Immediate imm(instruction->AsIntConstant()->GetValue());
2649    if (destination.IsRegister()) {
2650      __ movl(destination.As<Register>(), imm);
2651    } else {
2652      __ movl(Address(ESP, destination.GetStackIndex()), imm);
2653    }
2654  } else {
2655    LOG(FATAL) << "Unimplemented";
2656  }
2657}
2658
// Swaps a register with a stack slot via a scratch register. A scratch
// different from `reg` is suggested so the slot's copy is not clobbered.
void ParallelMoveResolverX86::Exchange(Register reg, int mem) {
  Register suggested_scratch = reg == EAX ? EBX : EAX;
  ScratchRegisterScope ensure_scratch(
      this, reg, suggested_scratch, codegen_->GetNumberOfCoreRegisters());

  // If the scratch was spilled (pushed), ESP moved down one word; adjust
  // the slot offset accordingly.
  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset));
  __ movl(Address(ESP, mem + stack_offset), reg);
  __ movl(reg, static_cast<Register>(ensure_scratch.GetRegister()));
}
2669
// Swaps two stack slots using two scratch registers.
void ParallelMoveResolverX86::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch1(
      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());

  // Suggest a second scratch distinct from the first.
  Register suggested_scratch = ensure_scratch1.GetRegister() == EAX ? EBX : EAX;
  ScratchRegisterScope ensure_scratch2(
      this, ensure_scratch1.GetRegister(), suggested_scratch, codegen_->GetNumberOfCoreRegisters());

  // Each spilled scratch pushed one word, moving ESP down; offset both
  // slot addresses by the total.
  int stack_offset = ensure_scratch1.IsSpilled() ? kX86WordSize : 0;
  stack_offset += ensure_scratch2.IsSpilled() ? kX86WordSize : 0;
  __ movl(static_cast<Register>(ensure_scratch1.GetRegister()), Address(ESP, mem1 + stack_offset));
  __ movl(static_cast<Register>(ensure_scratch2.GetRegister()), Address(ESP, mem2 + stack_offset));
  __ movl(Address(ESP, mem2 + stack_offset), static_cast<Register>(ensure_scratch1.GetRegister()));
  __ movl(Address(ESP, mem1 + stack_offset), static_cast<Register>(ensure_scratch2.GetRegister()));
}
2685
2686void ParallelMoveResolverX86::EmitSwap(size_t index) {
2687  MoveOperands* move = moves_.Get(index);
2688  Location source = move->GetSource();
2689  Location destination = move->GetDestination();
2690
2691  if (source.IsRegister() && destination.IsRegister()) {
2692    __ xchgl(destination.As<Register>(), source.As<Register>());
2693  } else if (source.IsRegister() && destination.IsStackSlot()) {
2694    Exchange(source.As<Register>(), destination.GetStackIndex());
2695  } else if (source.IsStackSlot() && destination.IsRegister()) {
2696    Exchange(destination.As<Register>(), source.GetStackIndex());
2697  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
2698    Exchange(destination.GetStackIndex(), source.GetStackIndex());
2699  } else {
2700    LOG(FATAL) << "Unimplemented";
2701  }
2702}
2703
// Saves a scratch register on the native stack.
void ParallelMoveResolverX86::SpillScratch(int reg) {
  __ pushl(static_cast<Register>(reg));
}
2707
// Reloads a scratch register previously saved by SpillScratch.
void ParallelMoveResolverX86::RestoreScratch(int reg) {
  __ popl(static_cast<Register>(reg));
}
2711
2712void LocationsBuilderX86::VisitLoadClass(HLoadClass* cls) {
2713  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
2714      ? LocationSummary::kCallOnSlowPath
2715      : LocationSummary::kNoCall;
2716  LocationSummary* locations =
2717      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
2718  locations->SetOut(Location::RequiresRegister());
2719}
2720
void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) {
  Register out = cls->GetLocations()->Out().As<Register>();
  if (cls->IsReferrersClass()) {
    // The class declaring the current method: read it directly off the
    // ArtMethod, no resolution or initialization check needed.
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    codegen_->LoadCurrentMethod(out);
    __ movl(out, Address(out, mirror::ArtMethod::DeclaringClassOffset().Int32Value()));
  } else {
    DCHECK(cls->CanCallRuntime());
    // Look the class up in the method's dex cache of resolved types.
    codegen_->LoadCurrentMethod(out);
    __ movl(out, Address(out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value()));
    __ movl(out, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));

    SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    // A null cache entry means the class is unresolved: take the slow path.
    __ testl(out, out);
    __ j(kEqual, slow_path->GetEntryLabel());
    if (cls->MustGenerateClinitCheck()) {
      // The init check binds the slow path's exit label itself.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
2746
2747void LocationsBuilderX86::VisitClinitCheck(HClinitCheck* check) {
2748  LocationSummary* locations =
2749      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
2750  locations->SetInAt(0, Location::RequiresRegister());
2751  if (check->HasUses()) {
2752    locations->SetOut(Location::SameAsFirstInput());
2753  }
2754}
2755
void InstructionCodeGeneratorX86::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class to not be null.
  // The slow path (with do_clinit == true) runs the class initializer if
  // the inline status check fails.
  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path, check->GetLocations()->InAt(0).As<Register>());
}
2763
// Jumps to `slow_path` unless the class in `class_reg` has reached at
// least the kStatusInitialized state; binds the slow path's exit label.
void InstructionCodeGeneratorX86::GenerateClassInitializationCheck(
    SlowPathCodeX86* slow_path, Register class_reg) {
  __ cmpl(Address(class_reg,  mirror::Class::StatusOffset().Int32Value()),
          Immediate(mirror::Class::kStatusInitialized));
  __ j(kLess, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
  // No need for memory fence, thanks to the X86 memory model.
}
2772
2773void LocationsBuilderX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
2774  LocationSummary* locations =
2775      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2776  locations->SetInAt(0, Location::RequiresRegister());
2777  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2778}
2779
void InstructionCodeGeneratorX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register cls = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  // Select the load matching the field's size and signedness.
  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      Register out = locations->Out().As<Register>();
      // Zero-extending 8-bit load.
      __ movzxb(out, Address(cls, offset));
      break;
    }

    case Primitive::kPrimByte: {
      Register out = locations->Out().As<Register>();
      // Sign-extending 8-bit load.
      __ movsxb(out, Address(cls, offset));
      break;
    }

    case Primitive::kPrimShort: {
      Register out = locations->Out().As<Register>();
      // Sign-extending 16-bit load.
      __ movsxw(out, Address(cls, offset));
      break;
    }

    case Primitive::kPrimChar: {
      Register out = locations->Out().As<Register>();
      // Zero-extending 16-bit load (chars are unsigned).
      __ movzxw(out, Address(cls, offset));
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register out = locations->Out().As<Register>();
      __ movl(out, Address(cls, offset));
      break;
    }

    case Primitive::kPrimLong: {
      // TODO: support volatile.
      // 64-bit load as two 32-bit moves (low word, then high word).
      __ movl(locations->Out().AsRegisterPairLow<Register>(), Address(cls, offset));
      __ movl(locations->Out().AsRegisterPairHigh<Register>(), Address(cls, kX86WordSize + offset));
      break;
    }

    case Primitive::kPrimFloat: {
      XmmRegister out = locations->Out().As<XmmRegister>();
      __ movss(out, Address(cls, offset));
      break;
    }

    case Primitive::kPrimDouble: {
      XmmRegister out = locations->Out().As<XmmRegister>();
      __ movsd(out, Address(cls, offset));
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2841
void LocationsBuilderX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  // Input 0: the class holding the static field.
  locations->SetInAt(0, Location::RequiresRegister());
  Primitive::Type field_type = instruction->GetFieldType();
  bool is_object_type = field_type == Primitive::kPrimNot;
  bool is_byte_type = (field_type == Primitive::kPrimBoolean)
      || (field_type == Primitive::kPrimByte);
  // The register allocator does not support multiple
  // inputs that die at entry with one in a specific register.
  if (is_byte_type) {
    // Ensure the value is in a byte register.
    locations->SetInAt(1, Location::RegisterLocation(EAX));
  } else {
    // NOTE(review): float/double values also take this path, yet the code
    // generator reads input 1 as an XmmRegister — confirm RequiresRegister
    // can resolve to an FPU register for those types.
    locations->SetInAt(1, Location::RequiresRegister());
  }
  // Temporary registers for the write barrier.
  if (is_object_type) {
    locations->AddTemp(Location::RequiresRegister());
    // Ensure the card is in a byte register.
    locations->AddTemp(Location::RegisterLocation(ECX));
  }
}
2865
void InstructionCodeGeneratorX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register cls = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
  Primitive::Type field_type = instruction->GetFieldType();

  // Select the store matching the field's size.
  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      // 8-bit store; the builder pinned the value to a byte register.
      ByteRegister value = locations->InAt(1).As<ByteRegister>();
      __ movb(Address(cls, offset), value);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      Register value = locations->InAt(1).As<Register>();
      __ movw(Address(cls, offset), value);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register value = locations->InAt(1).As<Register>();
      __ movl(Address(cls, offset), value);

      if (field_type == Primitive::kPrimNot) {
        // Reference store: mark the GC card for the holder object.
        Register temp = locations->GetTemp(0).As<Register>();
        Register card = locations->GetTemp(1).As<Register>();
        codegen_->MarkGCCard(temp, card, cls, value);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit store as two 32-bit moves (low word, then high word).
      Location value = locations->InAt(1);
      __ movl(Address(cls, offset), value.AsRegisterPairLow<Register>());
      __ movl(Address(cls, kX86WordSize + offset), value.AsRegisterPairHigh<Register>());
      break;
    }

    case Primitive::kPrimFloat: {
      XmmRegister value = locations->InAt(1).As<XmmRegister>();
      __ movss(Address(cls, offset), value);
      break;
    }

    case Primitive::kPrimDouble: {
      XmmRegister value = locations->InAt(1).As<XmmRegister>();
      __ movsd(Address(cls, offset), value);
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }
}
2924
2925void LocationsBuilderX86::VisitLoadString(HLoadString* load) {
2926  LocationSummary* locations =
2927      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
2928  locations->SetOut(Location::RequiresRegister());
2929}
2930
void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) {
  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86(load);
  codegen_->AddSlowPath(slow_path);

  // Read the string out of the current method's dex cache of strings.
  Register out = load->GetLocations()->Out().As<Register>();
  codegen_->LoadCurrentMethod(out);
  __ movl(out, Address(out, mirror::ArtMethod::DexCacheStringsOffset().Int32Value()));
  __ movl(out, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
  // A null entry means the string is unresolved: take the slow path.
  __ testl(out, out);
  __ j(kEqual, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
2943
2944void LocationsBuilderX86::VisitLoadException(HLoadException* load) {
2945  LocationSummary* locations =
2946      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
2947  locations->SetOut(Location::RequiresRegister());
2948}
2949
void InstructionCodeGeneratorX86::VisitLoadException(HLoadException* load) {
  // Read the pending exception from the Thread object (fs-relative) and
  // then clear the slot.
  Address address = Address::Absolute(Thread::ExceptionOffset<kX86WordSize>().Int32Value());
  __ fs()->movl(load->GetLocations()->Out().As<Register>(), address);
  __ fs()->movl(address, Immediate(0));
}
2955
2956void LocationsBuilderX86::VisitThrow(HThrow* instruction) {
2957  LocationSummary* locations =
2958      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2959  InvokeRuntimeCallingConvention calling_convention;
2960  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2961}
2962
void InstructionCodeGeneratorX86::VisitThrow(HThrow* instruction) {
  // Delegate to the pDeliverException entrypoint; the exception object is
  // already in the first runtime calling-convention register.
  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pDeliverException)));
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}
2967
2968void LocationsBuilderX86::VisitInstanceOf(HInstanceOf* instruction) {
2969  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
2970      ? LocationSummary::kNoCall
2971      : LocationSummary::kCallOnSlowPath;
2972  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
2973  locations->SetInAt(0, Location::RequiresRegister());
2974  locations->SetInAt(1, Location::Any());
2975  locations->SetOut(Location::RequiresRegister());
2976}
2977
// Computes `out = obj instanceof cls` as 0 or 1. Fast path compares the
// object's class against `cls`; non-final classes fall back to a runtime
// type check on the slow path when the classes differ.
void InstructionCodeGeneratorX86::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  Location cls = locations->InAt(1);
  Register out = locations->Out().As<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Label done, zero;
  SlowPathCodeX86* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // TODO: avoid this check if we know obj is not null.
  __ testl(obj, obj);
  __ j(kEqual, &zero);
  __ movl(out, Address(obj, class_offset));
  // Compare the class of `obj` with `cls`.
  if (cls.IsRegister()) {
    __ cmpl(out, cls.As<Register>());
  } else {
    DCHECK(cls.IsStackSlot()) << cls;
    __ cmpl(out, Address(ESP, cls.GetStackIndex()));
  }

  if (instruction->IsClassFinal()) {
    // Classes must be equal for the instanceof to succeed.
    __ j(kNotEqual, &zero);
    __ movl(out, Immediate(1));
    __ jmp(&done);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86(
        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
    codegen_->AddSlowPath(slow_path);
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ movl(out, Immediate(1));
    __ jmp(&done);
  }
  // Failure: materialize 0 in the output register.
  __ Bind(&zero);
  __ movl(out, Immediate(0));
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
  __ Bind(&done);
}
3022
3023void LocationsBuilderX86::VisitCheckCast(HCheckCast* instruction) {
3024  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3025      instruction, LocationSummary::kCallOnSlowPath);
3026  locations->SetInAt(0, Location::RequiresRegister());
3027  locations->SetInAt(1, Location::Any());
3028  locations->AddTemp(Location::RequiresRegister());
3029}
3030
// Emits a checked cast: throws via the slow path when `obj` is not an
// instance of `cls`. A null object trivially passes, hence the early jump
// straight to the exit label.
void InstructionCodeGeneratorX86::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  Location cls = locations->InAt(1);
  Register temp = locations->GetTemp(0).As<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86(
      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  // TODO: avoid this check if we know obj is not null.
  __ testl(obj, obj);
  __ j(kEqual, slow_path->GetExitLabel());
  __ movl(temp, Address(obj, class_offset));

  // Compare the class of `obj` with `cls`.
  if (cls.IsRegister()) {
    __ cmpl(temp, cls.As<Register>());
  } else {
    DCHECK(cls.IsStackSlot()) << cls;
    __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
  }

  // Classes differ: let the slow path do the full (throwing) check.
  __ j(kNotEqual, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
3057
3058void LocationsBuilderX86::VisitMonitorOperation(HMonitorOperation* instruction) {
3059  LocationSummary* locations =
3060      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3061  InvokeRuntimeCallingConvention calling_convention;
3062  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3063}
3064
void InstructionCodeGeneratorX86::VisitMonitorOperation(HMonitorOperation* instruction) {
  // Dispatch to the lock or unlock entrypoint depending on whether this is
  // a monitor-enter or a monitor-exit.
  __ fs()->call(Address::Absolute(instruction->IsEnter()
        ? QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pLockObject)
        : QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pUnlockObject)));
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}
3071
3072void LocationsBuilderX86::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
3073void LocationsBuilderX86::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
3074void LocationsBuilderX86::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
3075
3076void LocationsBuilderX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
3077  LocationSummary* locations =
3078      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3079  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
3080         || instruction->GetResultType() == Primitive::kPrimLong);
3081  locations->SetInAt(0, Location::RequiresRegister());
3082  locations->SetInAt(1, Location::Any());
3083  locations->SetOut(Location::SameAsFirstInput());
3084}
3085
// Code generation for And, Or and Xor is shared in HandleBitwiseOperation,
// which dispatches on the concrete instruction kind.
void InstructionCodeGeneratorX86::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorX86::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorX86::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}
3097
// Emits and/or/xor. The result overwrites the first operand's register(s);
// the second operand may be a register (pair), a 32-bit constant, or one
// (or, for longs, two) stack slot(s).
void InstructionCodeGeneratorX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    // 32-bit case: one instruction per operand form.
    if (second.IsRegister()) {
      if (instruction->IsAnd()) {
        __ andl(first.As<Register>(), second.As<Register>());
      } else if (instruction->IsOr()) {
        __ orl(first.As<Register>(), second.As<Register>());
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.As<Register>(), second.As<Register>());
      }
    } else if (second.IsConstant()) {
      if (instruction->IsAnd()) {
        __ andl(first.As<Register>(), Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
      } else if (instruction->IsOr()) {
        __ orl(first.As<Register>(), Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.As<Register>(), Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
      }
    } else {
      // Second operand lives on the stack.
      if (instruction->IsAnd()) {
        __ andl(first.As<Register>(), Address(ESP, second.GetStackIndex()));
      } else if (instruction->IsOr()) {
        __ orl(first.As<Register>(), Address(ESP, second.GetStackIndex()));
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.As<Register>(), Address(ESP, second.GetStackIndex()));
      }
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    // 64-bit case: operate on the low and high words independently
    // (bitwise ops have no cross-word carry).
    if (second.IsRegisterPair()) {
      if (instruction->IsAnd()) {
        __ andl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
        __ andl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
        __ orl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
        __ xorl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
      }
    } else {
      // Second operand is a 64-bit double stack slot.
      if (instruction->IsAnd()) {
        __ andl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
        __ andl(first.AsRegisterPairHigh<Register>(),
                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
      } else if (instruction->IsOr()) {
        __ orl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
        __ orl(first.AsRegisterPairHigh<Register>(),
                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
      } else {
        DCHECK(instruction->IsXor());
        __ xorl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
        __ xorl(first.AsRegisterPairHigh<Register>(),
                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
      }
    }
  }
}
3165
3166}  // namespace x86
3167}  // namespace art
3168