code_generator_x86.cc revision 3f8f936aff35f29d86183d31c20597ea17e9789d
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86.h"
18
19#include "entrypoints/quick/quick_entrypoints.h"
20#include "gc/accounting/card_table.h"
21#include "mirror/array-inl.h"
22#include "mirror/art_method.h"
23#include "mirror/class.h"
24#include "thread.h"
25#include "utils/assembler.h"
26#include "utils/stack_checks.h"
27#include "utils/x86/assembler_x86.h"
28#include "utils/x86/managed_register_x86.h"
29
30namespace art {
31
32namespace x86 {
33
34static constexpr bool kExplicitStackOverflowCheck = false;
35
36static constexpr int kNumberOfPushedRegistersAtEntry = 1;
37static constexpr int kCurrentMethodStackOffset = 0;
38
39static constexpr Register kRuntimeParameterCoreRegisters[] = { EAX, ECX, EDX, EBX };
40static constexpr size_t kRuntimeParameterCoreRegistersLength =
41    arraysize(kRuntimeParameterCoreRegisters);
42static constexpr XmmRegister kRuntimeParameterFpuRegisters[] = { };
43static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;
44
45// Marker for places that can be updated once we don't follow the quick ABI.
46static constexpr bool kFollowsQuickABI = true;
47
48class InvokeRuntimeCallingConvention : public CallingConvention<Register, XmmRegister> {
49 public:
50  InvokeRuntimeCallingConvention()
51      : CallingConvention(kRuntimeParameterCoreRegisters,
52                          kRuntimeParameterCoreRegistersLength,
53                          kRuntimeParameterFpuRegisters,
54                          kRuntimeParameterFpuRegistersLength) {}
55
56 private:
57  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
58};
59
60#define __ reinterpret_cast<X86Assembler*>(codegen->GetAssembler())->
61
62class SlowPathCodeX86 : public SlowPathCode {
63 public:
64  SlowPathCodeX86() : entry_label_(), exit_label_() {}
65
66  Label* GetEntryLabel() { return &entry_label_; }
67  Label* GetExitLabel() { return &exit_label_; }
68
69 private:
70  Label entry_label_;
71  Label exit_label_;
72
73  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeX86);
74};
75
76class NullCheckSlowPathX86 : public SlowPathCodeX86 {
77 public:
78  explicit NullCheckSlowPathX86(HNullCheck* instruction) : instruction_(instruction) {}
79
80  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
81    __ Bind(GetEntryLabel());
82    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowNullPointer)));
83    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
84  }
85
86 private:
87  HNullCheck* const instruction_;
88  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86);
89};
90
91class DivZeroCheckSlowPathX86 : public SlowPathCodeX86 {
92 public:
93  explicit DivZeroCheckSlowPathX86(HDivZeroCheck* instruction) : instruction_(instruction) {}
94
95  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
96    __ Bind(GetEntryLabel());
97    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowDivZero)));
98    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
99  }
100
101 private:
102  HDivZeroCheck* const instruction_;
103  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86);
104};
105
106class DivRemMinusOneSlowPathX86 : public SlowPathCodeX86 {
107 public:
108  explicit DivRemMinusOneSlowPathX86(Register reg, bool is_div) : reg_(reg), is_div_(is_div) {}
109
110  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
111    __ Bind(GetEntryLabel());
112    if (is_div_) {
113      __ negl(reg_);
114    } else {
115      __ movl(reg_, Immediate(0));
116    }
117    __ jmp(GetExitLabel());
118  }
119
120 private:
121  Register reg_;
122  bool is_div_;
123  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86);
124};
125
126class StackOverflowCheckSlowPathX86 : public SlowPathCodeX86 {
127 public:
128  StackOverflowCheckSlowPathX86() {}
129
130  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
131    __ Bind(GetEntryLabel());
132    __ addl(ESP,
133            Immediate(codegen->GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
134    __ fs()->jmp(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowStackOverflow)));
135  }
136
137 private:
138  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathX86);
139};
140
141class BoundsCheckSlowPathX86 : public SlowPathCodeX86 {
142 public:
143  BoundsCheckSlowPathX86(HBoundsCheck* instruction,
144                         Location index_location,
145                         Location length_location)
146      : instruction_(instruction),
147        index_location_(index_location),
148        length_location_(length_location) {}
149
150  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
151    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
152    __ Bind(GetEntryLabel());
153    // We're moving two locations to locations that could overlap, so we need a parallel
154    // move resolver.
155    InvokeRuntimeCallingConvention calling_convention;
156    x86_codegen->EmitParallelMoves(
157        index_location_,
158        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
159        length_location_,
160        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
161    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowArrayBounds)));
162    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
163  }
164
165 private:
166  HBoundsCheck* const instruction_;
167  const Location index_location_;
168  const Location length_location_;
169
170  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86);
171};
172
173class SuspendCheckSlowPathX86 : public SlowPathCodeX86 {
174 public:
175  explicit SuspendCheckSlowPathX86(HSuspendCheck* instruction, HBasicBlock* successor)
176      : instruction_(instruction), successor_(successor) {}
177
178  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
179    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
180    __ Bind(GetEntryLabel());
181    codegen->SaveLiveRegisters(instruction_->GetLocations());
182    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pTestSuspend)));
183    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
184    codegen->RestoreLiveRegisters(instruction_->GetLocations());
185    if (successor_ == nullptr) {
186      __ jmp(GetReturnLabel());
187    } else {
188      __ jmp(x86_codegen->GetLabelOf(successor_));
189    }
190  }
191
192  Label* GetReturnLabel() {
193    DCHECK(successor_ == nullptr);
194    return &return_label_;
195  }
196
197 private:
198  HSuspendCheck* const instruction_;
199  HBasicBlock* const successor_;
200  Label return_label_;
201
202  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86);
203};
204
205class LoadStringSlowPathX86 : public SlowPathCodeX86 {
206 public:
207  explicit LoadStringSlowPathX86(HLoadString* instruction) : instruction_(instruction) {}
208
209  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
210    LocationSummary* locations = instruction_->GetLocations();
211    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
212
213    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
214    __ Bind(GetEntryLabel());
215    codegen->SaveLiveRegisters(locations);
216
217    InvokeRuntimeCallingConvention calling_convention;
218    x86_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(0));
219    __ movl(calling_convention.GetRegisterAt(1), Immediate(instruction_->GetStringIndex()));
220    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pResolveString)));
221    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
222    x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
223    codegen->RestoreLiveRegisters(locations);
224
225    __ jmp(GetExitLabel());
226  }
227
228 private:
229  HLoadString* const instruction_;
230
231  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86);
232};
233
234class LoadClassSlowPathX86 : public SlowPathCodeX86 {
235 public:
236  LoadClassSlowPathX86(HLoadClass* cls,
237                       HInstruction* at,
238                       uint32_t dex_pc,
239                       bool do_clinit)
240      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
241    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
242  }
243
244  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
245    LocationSummary* locations = at_->GetLocations();
246    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
247    __ Bind(GetEntryLabel());
248    codegen->SaveLiveRegisters(locations);
249
250    InvokeRuntimeCallingConvention calling_convention;
251    __ movl(calling_convention.GetRegisterAt(0), Immediate(cls_->GetTypeIndex()));
252    x86_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
253    __ fs()->call(Address::Absolute(do_clinit_
254        ? QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pInitializeStaticStorage)
255        : QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pInitializeType)));
256    codegen->RecordPcInfo(at_, dex_pc_);
257
258    // Move the class to the desired location.
259    Location out = locations->Out();
260    if (out.IsValid()) {
261      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
262      x86_codegen->Move32(out, Location::RegisterLocation(EAX));
263    }
264
265    codegen->RestoreLiveRegisters(locations);
266    __ jmp(GetExitLabel());
267  }
268
269 private:
270  // The class this slow path will load.
271  HLoadClass* const cls_;
272
273  // The instruction where this slow path is happening.
274  // (Might be the load class or an initialization check).
275  HInstruction* const at_;
276
277  // The dex PC of `at_`.
278  const uint32_t dex_pc_;
279
280  // Whether to initialize the class.
281  const bool do_clinit_;
282
283  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86);
284};
285
286class TypeCheckSlowPathX86 : public SlowPathCodeX86 {
287 public:
288  TypeCheckSlowPathX86(HInstruction* instruction,
289                       Location class_to_check,
290                       Location object_class,
291                       uint32_t dex_pc)
292      : instruction_(instruction),
293        class_to_check_(class_to_check),
294        object_class_(object_class),
295        dex_pc_(dex_pc) {}
296
297  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
298    LocationSummary* locations = instruction_->GetLocations();
299    DCHECK(instruction_->IsCheckCast()
300           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
301
302    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
303    __ Bind(GetEntryLabel());
304    codegen->SaveLiveRegisters(locations);
305
306    // We're moving two locations to locations that could overlap, so we need a parallel
307    // move resolver.
308    InvokeRuntimeCallingConvention calling_convention;
309    x86_codegen->EmitParallelMoves(
310        class_to_check_,
311        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
312        object_class_,
313        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
314
315    if (instruction_->IsInstanceOf()) {
316      __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize,
317                                                              pInstanceofNonTrivial)));
318    } else {
319      DCHECK(instruction_->IsCheckCast());
320      __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pCheckCast)));
321    }
322
323    codegen->RecordPcInfo(instruction_, dex_pc_);
324    if (instruction_->IsInstanceOf()) {
325      x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
326    }
327    codegen->RestoreLiveRegisters(locations);
328
329    __ jmp(GetExitLabel());
330  }
331
332 private:
333  HInstruction* const instruction_;
334  const Location class_to_check_;
335  const Location object_class_;
336  const uint32_t dex_pc_;
337
338  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86);
339};
340
341#undef __
342#define __ reinterpret_cast<X86Assembler*>(GetAssembler())->
343
344inline Condition X86Condition(IfCondition cond) {
345  switch (cond) {
346    case kCondEQ: return kEqual;
347    case kCondNE: return kNotEqual;
348    case kCondLT: return kLess;
349    case kCondLE: return kLessEqual;
350    case kCondGT: return kGreater;
351    case kCondGE: return kGreaterEqual;
352    default:
353      LOG(FATAL) << "Unknown if condition";
354  }
355  return kEqual;
356}
357
358void CodeGeneratorX86::DumpCoreRegister(std::ostream& stream, int reg) const {
359  stream << X86ManagedRegister::FromCpuRegister(Register(reg));
360}
361
362void CodeGeneratorX86::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
363  stream << X86ManagedRegister::FromXmmRegister(XmmRegister(reg));
364}
365
366size_t CodeGeneratorX86::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
367  __ movl(Address(ESP, stack_index), static_cast<Register>(reg_id));
368  return kX86WordSize;
369}
370
371size_t CodeGeneratorX86::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
372  __ movl(static_cast<Register>(reg_id), Address(ESP, stack_index));
373  return kX86WordSize;
374}
375
376CodeGeneratorX86::CodeGeneratorX86(HGraph* graph)
377    : CodeGenerator(graph, kNumberOfCpuRegisters, kNumberOfXmmRegisters, kNumberOfRegisterPairs),
378      block_labels_(graph->GetArena(), 0),
379      location_builder_(graph, this),
380      instruction_visitor_(graph, this),
381      move_resolver_(graph->GetArena(), this) {}
382
383size_t CodeGeneratorX86::FrameEntrySpillSize() const {
384  return kNumberOfPushedRegistersAtEntry * kX86WordSize;
385}
386
387Location CodeGeneratorX86::AllocateFreeRegister(Primitive::Type type) const {
388  switch (type) {
389    case Primitive::kPrimLong: {
390      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
391      X86ManagedRegister pair =
392          X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
393      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
394      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);
395      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
396      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
397      UpdateBlockedPairRegisters();
398      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
399    }
400
401    case Primitive::kPrimByte:
402    case Primitive::kPrimBoolean:
403    case Primitive::kPrimChar:
404    case Primitive::kPrimShort:
405    case Primitive::kPrimInt:
406    case Primitive::kPrimNot: {
407      Register reg = static_cast<Register>(
408          FindFreeEntry(blocked_core_registers_, kNumberOfCpuRegisters));
409      // Block all register pairs that contain `reg`.
410      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
411        X86ManagedRegister current =
412            X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
413        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
414          blocked_register_pairs_[i] = true;
415        }
416      }
417      return Location::RegisterLocation(reg);
418    }
419
420    case Primitive::kPrimFloat:
421    case Primitive::kPrimDouble: {
422      return Location::FpuRegisterLocation(
423          FindFreeEntry(blocked_fpu_registers_, kNumberOfXmmRegisters));
424    }
425
426    case Primitive::kPrimVoid:
427      LOG(FATAL) << "Unreachable type " << type;
428  }
429
430  return Location();
431}
432
433void CodeGeneratorX86::SetupBlockedRegisters() const {
434  // Don't allocate the dalvik style register pair passing.
435  blocked_register_pairs_[ECX_EDX] = true;
436
437  // Stack register is always reserved.
438  blocked_core_registers_[ESP] = true;
439
440  // TODO: We currently don't use Quick's callee saved registers.
441  DCHECK(kFollowsQuickABI);
442  blocked_core_registers_[EBP] = true;
443  blocked_core_registers_[ESI] = true;
444  blocked_core_registers_[EDI] = true;
445
446  UpdateBlockedPairRegisters();
447}
448
449void CodeGeneratorX86::UpdateBlockedPairRegisters() const {
450  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
451    X86ManagedRegister current =
452        X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
453    if (blocked_core_registers_[current.AsRegisterPairLow()]
454        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
455      blocked_register_pairs_[i] = true;
456    }
457  }
458}
459
460InstructionCodeGeneratorX86::InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen)
461      : HGraphVisitor(graph),
462        assembler_(codegen->GetAssembler()),
463        codegen_(codegen) {}
464
465void CodeGeneratorX86::GenerateFrameEntry() {
466  // Create a fake register to mimic Quick.
467  static const int kFakeReturnRegister = 8;
468  core_spill_mask_ |= (1 << kFakeReturnRegister);
469
470  bool skip_overflow_check =
471      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86);
472  if (!skip_overflow_check && !kExplicitStackOverflowCheck) {
473    __ testl(EAX, Address(ESP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86))));
474    RecordPcInfo(nullptr, 0);
475  }
476
477  // The return PC has already been pushed on the stack.
478  __ subl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
479
480  if (!skip_overflow_check && kExplicitStackOverflowCheck) {
481    SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathX86();
482    AddSlowPath(slow_path);
483
484    __ fs()->cmpl(ESP, Address::Absolute(Thread::StackEndOffset<kX86WordSize>()));
485    __ j(kLess, slow_path->GetEntryLabel());
486  }
487
488  __ movl(Address(ESP, kCurrentMethodStackOffset), EAX);
489}
490
491void CodeGeneratorX86::GenerateFrameExit() {
492  __ addl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
493}
494
495void CodeGeneratorX86::Bind(HBasicBlock* block) {
496  __ Bind(GetLabelOf(block));
497}
498
499void CodeGeneratorX86::LoadCurrentMethod(Register reg) {
500  __ movl(reg, Address(ESP, kCurrentMethodStackOffset));
501}
502
503Location CodeGeneratorX86::GetStackLocation(HLoadLocal* load) const {
504  switch (load->GetType()) {
505    case Primitive::kPrimLong:
506    case Primitive::kPrimDouble:
507      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
508      break;
509
510    case Primitive::kPrimInt:
511    case Primitive::kPrimNot:
512    case Primitive::kPrimFloat:
513      return Location::StackSlot(GetStackSlot(load->GetLocal()));
514
515    case Primitive::kPrimBoolean:
516    case Primitive::kPrimByte:
517    case Primitive::kPrimChar:
518    case Primitive::kPrimShort:
519    case Primitive::kPrimVoid:
520      LOG(FATAL) << "Unexpected type " << load->GetType();
521  }
522
523  LOG(FATAL) << "Unreachable";
524  return Location();
525}
526
527Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
528  switch (type) {
529    case Primitive::kPrimBoolean:
530    case Primitive::kPrimByte:
531    case Primitive::kPrimChar:
532    case Primitive::kPrimShort:
533    case Primitive::kPrimInt:
534    case Primitive::kPrimFloat:
535    case Primitive::kPrimNot: {
536      uint32_t index = gp_index_++;
537      if (index < calling_convention.GetNumberOfRegisters()) {
538        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
539      } else {
540        return Location::StackSlot(calling_convention.GetStackOffsetOf(index));
541      }
542    }
543
544    case Primitive::kPrimLong:
545    case Primitive::kPrimDouble: {
546      uint32_t index = gp_index_;
547      gp_index_ += 2;
548      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
549        X86ManagedRegister pair = X86ManagedRegister::FromRegisterPair(
550            calling_convention.GetRegisterPairAt(index));
551        return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
552      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
553        // On X86, the register index and stack index of a quick parameter is the same, since
554        // we are passing floating pointer values in core registers.
555        return Location::QuickParameter(index, index);
556      } else {
557        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(index));
558      }
559    }
560
561    case Primitive::kPrimVoid:
562      LOG(FATAL) << "Unexpected parameter type " << type;
563      break;
564  }
565  return Location();
566}
567
568void CodeGeneratorX86::Move32(Location destination, Location source) {
569  if (source.Equals(destination)) {
570    return;
571  }
572  if (destination.IsRegister()) {
573    if (source.IsRegister()) {
574      __ movl(destination.AsRegister<Register>(), source.AsRegister<Register>());
575    } else if (source.IsFpuRegister()) {
576      __ movd(destination.AsRegister<Register>(), source.AsFpuRegister<XmmRegister>());
577    } else {
578      DCHECK(source.IsStackSlot());
579      __ movl(destination.AsRegister<Register>(), Address(ESP, source.GetStackIndex()));
580    }
581  } else if (destination.IsFpuRegister()) {
582    if (source.IsRegister()) {
583      __ movd(destination.AsFpuRegister<XmmRegister>(), source.AsRegister<Register>());
584    } else if (source.IsFpuRegister()) {
585      __ movaps(destination.AsFpuRegister<XmmRegister>(), source.AsFpuRegister<XmmRegister>());
586    } else {
587      DCHECK(source.IsStackSlot());
588      __ movss(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
589    }
590  } else {
591    DCHECK(destination.IsStackSlot()) << destination;
592    if (source.IsRegister()) {
593      __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegister<Register>());
594    } else if (source.IsFpuRegister()) {
595      __ movss(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
596    } else {
597      DCHECK(source.IsStackSlot());
598      __ pushl(Address(ESP, source.GetStackIndex()));
599      __ popl(Address(ESP, destination.GetStackIndex()));
600    }
601  }
602}
603
604void CodeGeneratorX86::Move64(Location destination, Location source) {
605  if (source.Equals(destination)) {
606    return;
607  }
608  if (destination.IsRegisterPair()) {
609    if (source.IsRegisterPair()) {
610      EmitParallelMoves(
611          Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
612          Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
613          Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
614          Location::RegisterLocation(destination.AsRegisterPairLow<Register>()));
615    } else if (source.IsFpuRegister()) {
616      LOG(FATAL) << "Unimplemented";
617    } else if (source.IsQuickParameter()) {
618      uint16_t register_index = source.GetQuickParameterRegisterIndex();
619      uint16_t stack_index = source.GetQuickParameterStackIndex();
620      InvokeDexCallingConvention calling_convention;
621      EmitParallelMoves(
622          Location::RegisterLocation(calling_convention.GetRegisterAt(register_index)),
623          Location::RegisterLocation(destination.AsRegisterPairLow<Register>()),
624          Location::StackSlot(
625              calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize()),
626          Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()));
627    } else {
628      // No conflict possible, so just do the moves.
629      DCHECK(source.IsDoubleStackSlot());
630      __ movl(destination.AsRegisterPairLow<Register>(), Address(ESP, source.GetStackIndex()));
631      __ movl(destination.AsRegisterPairHigh<Register>(),
632              Address(ESP, source.GetHighStackIndex(kX86WordSize)));
633    }
634  } else if (destination.IsQuickParameter()) {
635    InvokeDexCallingConvention calling_convention;
636    uint16_t register_index = destination.GetQuickParameterRegisterIndex();
637    uint16_t stack_index = destination.GetQuickParameterStackIndex();
638    if (source.IsRegisterPair()) {
639      LOG(FATAL) << "Unimplemented";
640    } else if (source.IsFpuRegister()) {
641      LOG(FATAL) << "Unimplemented";
642    } else {
643      DCHECK(source.IsDoubleStackSlot());
644      EmitParallelMoves(
645          Location::StackSlot(source.GetStackIndex()),
646          Location::RegisterLocation(calling_convention.GetRegisterAt(register_index)),
647          Location::StackSlot(source.GetHighStackIndex(kX86WordSize)),
648          Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index + 1)));
649    }
650  } else if (destination.IsFpuRegister()) {
651    if (source.IsDoubleStackSlot()) {
652      __ movsd(destination.AsFpuRegister<XmmRegister>(), Address(ESP, source.GetStackIndex()));
653    } else {
654      LOG(FATAL) << "Unimplemented";
655    }
656  } else {
657    DCHECK(destination.IsDoubleStackSlot()) << destination;
658    if (source.IsRegisterPair()) {
659      // No conflict possible, so just do the moves.
660      __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegisterPairLow<Register>());
661      __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
662              source.AsRegisterPairHigh<Register>());
663    } else if (source.IsQuickParameter()) {
664      // No conflict possible, so just do the move.
665      InvokeDexCallingConvention calling_convention;
666      uint16_t register_index = source.GetQuickParameterRegisterIndex();
667      uint16_t stack_index = source.GetQuickParameterStackIndex();
668      // Just move the low part. The only time a source is a quick parameter is
669      // when moving the parameter to its stack locations. And the (Java) caller
670      // of this method has already done that.
671      __ movl(Address(ESP, destination.GetStackIndex()),
672              calling_convention.GetRegisterAt(register_index));
673      DCHECK_EQ(calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize(),
674                static_cast<size_t>(destination.GetHighStackIndex(kX86WordSize)));
675    } else if (source.IsFpuRegister()) {
676      __ movsd(Address(ESP, destination.GetStackIndex()), source.AsFpuRegister<XmmRegister>());
677    } else {
678      DCHECK(source.IsDoubleStackSlot());
679      EmitParallelMoves(
680          Location::StackSlot(source.GetStackIndex()),
681          Location::StackSlot(destination.GetStackIndex()),
682          Location::StackSlot(source.GetHighStackIndex(kX86WordSize)),
683          Location::StackSlot(destination.GetHighStackIndex(kX86WordSize)));
684    }
685  }
686}
687
688void CodeGeneratorX86::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
689  LocationSummary* locations = instruction->GetLocations();
690  if (locations != nullptr && locations->Out().Equals(location)) {
691    return;
692  }
693
694  if (locations != nullptr && locations->Out().IsConstant()) {
695    HConstant* const_to_move = locations->Out().GetConstant();
696    if (const_to_move->IsIntConstant()) {
697      Immediate imm(const_to_move->AsIntConstant()->GetValue());
698      if (location.IsRegister()) {
699        __ movl(location.AsRegister<Register>(), imm);
700      } else if (location.IsStackSlot()) {
701        __ movl(Address(ESP, location.GetStackIndex()), imm);
702      } else {
703        DCHECK(location.IsConstant());
704        DCHECK_EQ(location.GetConstant(), const_to_move);
705      }
706    } else if (const_to_move->IsLongConstant()) {
707      int64_t value = const_to_move->AsLongConstant()->GetValue();
708      if (location.IsRegisterPair()) {
709        __ movl(location.AsRegisterPairLow<Register>(), Immediate(Low32Bits(value)));
710        __ movl(location.AsRegisterPairHigh<Register>(), Immediate(High32Bits(value)));
711      } else if (location.IsDoubleStackSlot()) {
712        __ movl(Address(ESP, location.GetStackIndex()), Immediate(Low32Bits(value)));
713        __ movl(Address(ESP, location.GetHighStackIndex(kX86WordSize)),
714                Immediate(High32Bits(value)));
715      } else {
716        DCHECK(location.IsConstant());
717        DCHECK_EQ(location.GetConstant(), instruction);
718      }
719    }
720  } else if (instruction->IsTemporary()) {
721    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
722    if (temp_location.IsStackSlot()) {
723      Move32(location, temp_location);
724    } else {
725      DCHECK(temp_location.IsDoubleStackSlot());
726      Move64(location, temp_location);
727    }
728  } else if (instruction->IsLoadLocal()) {
729    int slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
730    switch (instruction->GetType()) {
731      case Primitive::kPrimBoolean:
732      case Primitive::kPrimByte:
733      case Primitive::kPrimChar:
734      case Primitive::kPrimShort:
735      case Primitive::kPrimInt:
736      case Primitive::kPrimNot:
737      case Primitive::kPrimFloat:
738        Move32(location, Location::StackSlot(slot));
739        break;
740
741      case Primitive::kPrimLong:
742      case Primitive::kPrimDouble:
743        Move64(location, Location::DoubleStackSlot(slot));
744        break;
745
746      default:
747        LOG(FATAL) << "Unimplemented local type " << instruction->GetType();
748    }
749  } else {
750    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
751    switch (instruction->GetType()) {
752      case Primitive::kPrimBoolean:
753      case Primitive::kPrimByte:
754      case Primitive::kPrimChar:
755      case Primitive::kPrimShort:
756      case Primitive::kPrimInt:
757      case Primitive::kPrimNot:
758      case Primitive::kPrimFloat:
759        Move32(location, locations->Out());
760        break;
761
762      case Primitive::kPrimLong:
763      case Primitive::kPrimDouble:
764        Move64(location, locations->Out());
765        break;
766
767      default:
768        LOG(FATAL) << "Unexpected type " << instruction->GetType();
769    }
770  }
771}
772
773void LocationsBuilderX86::VisitGoto(HGoto* got) {
774  got->SetLocations(nullptr);
775}
776
777void InstructionCodeGeneratorX86::VisitGoto(HGoto* got) {
778  HBasicBlock* successor = got->GetSuccessor();
779  DCHECK(!successor->IsExitBlock());
780
781  HBasicBlock* block = got->GetBlock();
782  HInstruction* previous = got->GetPrevious();
783
784  HLoopInformation* info = block->GetLoopInformation();
785  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
786    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
787    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
788    return;
789  }
790
791  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
792    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
793  }
794  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
795    __ jmp(codegen_->GetLabelOf(successor));
796  }
797}
798
799void LocationsBuilderX86::VisitExit(HExit* exit) {
800  exit->SetLocations(nullptr);
801}
802
803void InstructionCodeGeneratorX86::VisitExit(HExit* exit) {
804  UNUSED(exit);
805  if (kIsDebugBuild) {
806    __ Comment("Unreachable");
807    __ int3();
808  }
809}
810
811void LocationsBuilderX86::VisitIf(HIf* if_instr) {
812  LocationSummary* locations =
813      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
814  HInstruction* cond = if_instr->InputAt(0);
815  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
816    locations->SetInAt(0, Location::Any());
817  }
818}
819
820void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
821  HInstruction* cond = if_instr->InputAt(0);
822  if (cond->IsIntConstant()) {
823    // Constant condition, statically compared against 1.
824    int32_t cond_value = cond->AsIntConstant()->GetValue();
825    if (cond_value == 1) {
826      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
827                                     if_instr->IfTrueSuccessor())) {
828        __ jmp(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
829      }
830      return;
831    } else {
832      DCHECK_EQ(cond_value, 0);
833    }
834  } else {
835    bool materialized =
836        !cond->IsCondition() || cond->AsCondition()->NeedsMaterialization();
837    // Moves do not affect the eflags register, so if the condition is
838    // evaluated just before the if, we don't need to evaluate it
839    // again.
840    bool eflags_set = cond->IsCondition()
841        && cond->AsCondition()->IsBeforeWhenDisregardMoves(if_instr);
842    if (materialized) {
843      if (!eflags_set) {
844        // Materialized condition, compare against 0.
845        Location lhs = if_instr->GetLocations()->InAt(0);
846        if (lhs.IsRegister()) {
847          __ cmpl(lhs.AsRegister<Register>(), Immediate(0));
848        } else {
849          __ cmpl(Address(ESP, lhs.GetStackIndex()), Immediate(0));
850        }
851        __ j(kNotEqual,  codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
852      } else {
853        __ j(X86Condition(cond->AsCondition()->GetCondition()),
854             codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
855      }
856    } else {
857      Location lhs = cond->GetLocations()->InAt(0);
858      Location rhs = cond->GetLocations()->InAt(1);
859      // LHS is guaranteed to be in a register (see
860      // LocationsBuilderX86::VisitCondition).
861      if (rhs.IsRegister()) {
862        __ cmpl(lhs.AsRegister<Register>(), rhs.AsRegister<Register>());
863      } else if (rhs.IsConstant()) {
864        HIntConstant* instruction = rhs.GetConstant()->AsIntConstant();
865        Immediate imm(instruction->AsIntConstant()->GetValue());
866        __ cmpl(lhs.AsRegister<Register>(), imm);
867      } else {
868        __ cmpl(lhs.AsRegister<Register>(), Address(ESP, rhs.GetStackIndex()));
869      }
870      __ j(X86Condition(cond->AsCondition()->GetCondition()),
871           codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
872    }
873  }
874  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
875                                 if_instr->IfFalseSuccessor())) {
876    __ jmp(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
877  }
878}
879
880void LocationsBuilderX86::VisitLocal(HLocal* local) {
881  local->SetLocations(nullptr);
882}
883
884void InstructionCodeGeneratorX86::VisitLocal(HLocal* local) {
885  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
886}
887
888void LocationsBuilderX86::VisitLoadLocal(HLoadLocal* local) {
889  local->SetLocations(nullptr);
890}
891
892void InstructionCodeGeneratorX86::VisitLoadLocal(HLoadLocal* load) {
893  // Nothing to do, this is driven by the code generator.
894  UNUSED(load);
895}
896
897void LocationsBuilderX86::VisitStoreLocal(HStoreLocal* store) {
898  LocationSummary* locations =
899      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
900  switch (store->InputAt(1)->GetType()) {
901    case Primitive::kPrimBoolean:
902    case Primitive::kPrimByte:
903    case Primitive::kPrimChar:
904    case Primitive::kPrimShort:
905    case Primitive::kPrimInt:
906    case Primitive::kPrimNot:
907    case Primitive::kPrimFloat:
908      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
909      break;
910
911    case Primitive::kPrimLong:
912    case Primitive::kPrimDouble:
913      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
914      break;
915
916    default:
917      LOG(FATAL) << "Unknown local type " << store->InputAt(1)->GetType();
918  }
919  store->SetLocations(locations);
920}
921
922void InstructionCodeGeneratorX86::VisitStoreLocal(HStoreLocal* store) {
923  UNUSED(store);
924}
925
926void LocationsBuilderX86::VisitCondition(HCondition* comp) {
927  LocationSummary* locations =
928      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
929  locations->SetInAt(0, Location::RequiresRegister());
930  locations->SetInAt(1, Location::Any());
931  if (comp->NeedsMaterialization()) {
932    locations->SetOut(Location::RequiresRegister());
933  }
934}
935
936void InstructionCodeGeneratorX86::VisitCondition(HCondition* comp) {
937  if (comp->NeedsMaterialization()) {
938    LocationSummary* locations = comp->GetLocations();
939    Register reg = locations->Out().AsRegister<Register>();
940    // Clear register: setcc only sets the low byte.
941    __ xorl(reg, reg);
942    if (locations->InAt(1).IsRegister()) {
943      __ cmpl(locations->InAt(0).AsRegister<Register>(),
944              locations->InAt(1).AsRegister<Register>());
945    } else if (locations->InAt(1).IsConstant()) {
946      HConstant* instruction = locations->InAt(1).GetConstant();
947      Immediate imm(instruction->AsIntConstant()->GetValue());
948      __ cmpl(locations->InAt(0).AsRegister<Register>(), imm);
949    } else {
950      __ cmpl(locations->InAt(0).AsRegister<Register>(),
951              Address(ESP, locations->InAt(1).GetStackIndex()));
952    }
953    __ setb(X86Condition(comp->GetCondition()), reg);
954  }
955}
956
957void LocationsBuilderX86::VisitEqual(HEqual* comp) {
958  VisitCondition(comp);
959}
960
961void InstructionCodeGeneratorX86::VisitEqual(HEqual* comp) {
962  VisitCondition(comp);
963}
964
965void LocationsBuilderX86::VisitNotEqual(HNotEqual* comp) {
966  VisitCondition(comp);
967}
968
969void InstructionCodeGeneratorX86::VisitNotEqual(HNotEqual* comp) {
970  VisitCondition(comp);
971}
972
973void LocationsBuilderX86::VisitLessThan(HLessThan* comp) {
974  VisitCondition(comp);
975}
976
977void InstructionCodeGeneratorX86::VisitLessThan(HLessThan* comp) {
978  VisitCondition(comp);
979}
980
981void LocationsBuilderX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
982  VisitCondition(comp);
983}
984
985void InstructionCodeGeneratorX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
986  VisitCondition(comp);
987}
988
989void LocationsBuilderX86::VisitGreaterThan(HGreaterThan* comp) {
990  VisitCondition(comp);
991}
992
993void InstructionCodeGeneratorX86::VisitGreaterThan(HGreaterThan* comp) {
994  VisitCondition(comp);
995}
996
997void LocationsBuilderX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
998  VisitCondition(comp);
999}
1000
1001void InstructionCodeGeneratorX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
1002  VisitCondition(comp);
1003}
1004
1005void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
1006  LocationSummary* locations =
1007      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1008  locations->SetOut(Location::ConstantLocation(constant));
1009}
1010
1011void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant) {
1012  // Will be generated at use site.
1013  UNUSED(constant);
1014}
1015
1016void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
1017  LocationSummary* locations =
1018      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1019  locations->SetOut(Location::ConstantLocation(constant));
1020}
1021
1022void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant) {
1023  // Will be generated at use site.
1024  UNUSED(constant);
1025}
1026
1027void LocationsBuilderX86::VisitFloatConstant(HFloatConstant* constant) {
1028  LocationSummary* locations =
1029      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1030  locations->SetOut(Location::ConstantLocation(constant));
1031}
1032
1033void InstructionCodeGeneratorX86::VisitFloatConstant(HFloatConstant* constant) {
1034  // Will be generated at use site.
1035  UNUSED(constant);
1036}
1037
1038void LocationsBuilderX86::VisitDoubleConstant(HDoubleConstant* constant) {
1039  LocationSummary* locations =
1040      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1041  locations->SetOut(Location::ConstantLocation(constant));
1042}
1043
1044void InstructionCodeGeneratorX86::VisitDoubleConstant(HDoubleConstant* constant) {
1045  // Will be generated at use site.
1046  UNUSED(constant);
1047}
1048
1049void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
1050  ret->SetLocations(nullptr);
1051}
1052
1053void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret) {
1054  UNUSED(ret);
1055  codegen_->GenerateFrameExit();
1056  __ ret();
1057}
1058
1059void LocationsBuilderX86::VisitReturn(HReturn* ret) {
1060  LocationSummary* locations =
1061      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
1062  switch (ret->InputAt(0)->GetType()) {
1063    case Primitive::kPrimBoolean:
1064    case Primitive::kPrimByte:
1065    case Primitive::kPrimChar:
1066    case Primitive::kPrimShort:
1067    case Primitive::kPrimInt:
1068    case Primitive::kPrimNot:
1069      locations->SetInAt(0, Location::RegisterLocation(EAX));
1070      break;
1071
1072    case Primitive::kPrimLong:
1073      locations->SetInAt(
1074          0, Location::RegisterPairLocation(EAX, EDX));
1075      break;
1076
1077    case Primitive::kPrimFloat:
1078    case Primitive::kPrimDouble:
1079      locations->SetInAt(
1080          0, Location::FpuRegisterLocation(XMM0));
1081      break;
1082
1083    default:
1084      LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
1085  }
1086}
1087
1088void InstructionCodeGeneratorX86::VisitReturn(HReturn* ret) {
1089  if (kIsDebugBuild) {
1090    switch (ret->InputAt(0)->GetType()) {
1091      case Primitive::kPrimBoolean:
1092      case Primitive::kPrimByte:
1093      case Primitive::kPrimChar:
1094      case Primitive::kPrimShort:
1095      case Primitive::kPrimInt:
1096      case Primitive::kPrimNot:
1097        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegister<Register>(), EAX);
1098        break;
1099
1100      case Primitive::kPrimLong:
1101        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairLow<Register>(), EAX);
1102        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairHigh<Register>(), EDX);
1103        break;
1104
1105      case Primitive::kPrimFloat:
1106      case Primitive::kPrimDouble:
1107        DCHECK_EQ(ret->GetLocations()->InAt(0).AsFpuRegister<XmmRegister>(), XMM0);
1108        break;
1109
1110      default:
1111        LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
1112    }
1113  }
1114  codegen_->GenerateFrameExit();
1115  __ ret();
1116}
1117
1118void LocationsBuilderX86::VisitInvokeStatic(HInvokeStatic* invoke) {
1119  HandleInvoke(invoke);
1120}
1121
1122void InstructionCodeGeneratorX86::VisitInvokeStatic(HInvokeStatic* invoke) {
1123  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
1124
1125  // TODO: Implement all kinds of calls:
1126  // 1) boot -> boot
1127  // 2) app -> boot
1128  // 3) app -> app
1129  //
1130  // Currently we implement the app -> app logic, which looks up in the resolve cache.
1131
1132  // temp = method;
1133  codegen_->LoadCurrentMethod(temp);
1134  // temp = temp->dex_cache_resolved_methods_;
1135  __ movl(temp, Address(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()));
1136  // temp = temp[index_in_cache]
1137  __ movl(temp, Address(temp, CodeGenerator::GetCacheOffset(invoke->GetIndexInDexCache())));
1138  // (temp + offset_of_quick_compiled_code)()
1139  __ call(Address(
1140      temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86WordSize).Int32Value()));
1141
1142  DCHECK(!codegen_->IsLeafMethod());
1143  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1144}
1145
1146void LocationsBuilderX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1147  HandleInvoke(invoke);
1148}
1149
1150void LocationsBuilderX86::HandleInvoke(HInvoke* invoke) {
1151  LocationSummary* locations =
1152      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1153  locations->AddTemp(Location::RegisterLocation(EAX));
1154
1155  InvokeDexCallingConventionVisitor calling_convention_visitor;
1156  for (size_t i = 0; i < invoke->InputCount(); i++) {
1157    HInstruction* input = invoke->InputAt(i);
1158    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1159  }
1160
1161  switch (invoke->GetType()) {
1162    case Primitive::kPrimBoolean:
1163    case Primitive::kPrimByte:
1164    case Primitive::kPrimChar:
1165    case Primitive::kPrimShort:
1166    case Primitive::kPrimInt:
1167    case Primitive::kPrimNot:
1168      locations->SetOut(Location::RegisterLocation(EAX));
1169      break;
1170
1171    case Primitive::kPrimLong:
1172      locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
1173      break;
1174
1175    case Primitive::kPrimVoid:
1176      break;
1177
1178    case Primitive::kPrimDouble:
1179    case Primitive::kPrimFloat:
1180      locations->SetOut(Location::FpuRegisterLocation(XMM0));
1181      break;
1182  }
1183
1184  invoke->SetLocations(locations);
1185}
1186
1187void InstructionCodeGeneratorX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
1188  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
1189  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
1190          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
1191  LocationSummary* locations = invoke->GetLocations();
1192  Location receiver = locations->InAt(0);
1193  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1194  // temp = object->GetClass();
1195  if (receiver.IsStackSlot()) {
1196    __ movl(temp, Address(ESP, receiver.GetStackIndex()));
1197    __ movl(temp, Address(temp, class_offset));
1198  } else {
1199    __ movl(temp, Address(receiver.AsRegister<Register>(), class_offset));
1200  }
1201  // temp = temp->GetMethodAt(method_offset);
1202  __ movl(temp, Address(temp, method_offset));
1203  // call temp->GetEntryPoint();
1204  __ call(Address(
1205      temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86WordSize).Int32Value()));
1206
1207  DCHECK(!codegen_->IsLeafMethod());
1208  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1209}
1210
1211void LocationsBuilderX86::VisitInvokeInterface(HInvokeInterface* invoke) {
1212  HandleInvoke(invoke);
1213  // Add the hidden argument.
1214  invoke->GetLocations()->AddTemp(Location::FpuRegisterLocation(XMM0));
1215}
1216
1217void InstructionCodeGeneratorX86::VisitInvokeInterface(HInvokeInterface* invoke) {
1218  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
1219  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
1220  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
1221          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
1222  LocationSummary* locations = invoke->GetLocations();
1223  Location receiver = locations->InAt(0);
1224  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
1225
1226  // Set the hidden argument.
1227  __ movl(temp, Immediate(invoke->GetDexMethodIndex()));
1228  __ movd(invoke->GetLocations()->GetTemp(1).AsFpuRegister<XmmRegister>(), temp);
1229
1230  // temp = object->GetClass();
1231  if (receiver.IsStackSlot()) {
1232    __ movl(temp, Address(ESP, receiver.GetStackIndex()));
1233    __ movl(temp, Address(temp, class_offset));
1234  } else {
1235    __ movl(temp, Address(receiver.AsRegister<Register>(), class_offset));
1236  }
1237  // temp = temp->GetImtEntryAt(method_offset);
1238  __ movl(temp, Address(temp, method_offset));
1239  // call temp->GetEntryPoint();
1240  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
1241      kX86WordSize).Int32Value()));
1242
1243  DCHECK(!codegen_->IsLeafMethod());
1244  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1245}
1246
1247void LocationsBuilderX86::VisitNeg(HNeg* neg) {
1248  LocationSummary* locations =
1249      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
1250  switch (neg->GetResultType()) {
1251    case Primitive::kPrimInt:
1252    case Primitive::kPrimLong:
1253      locations->SetInAt(0, Location::RequiresRegister());
1254      locations->SetOut(Location::SameAsFirstInput());
1255      break;
1256
1257    case Primitive::kPrimFloat:
1258      locations->SetInAt(0, Location::RequiresFpuRegister());
1259      locations->SetOut(Location::SameAsFirstInput());
1260      locations->AddTemp(Location::RequiresRegister());
1261      locations->AddTemp(Location::RequiresFpuRegister());
1262      break;
1263
1264    case Primitive::kPrimDouble:
1265      locations->SetInAt(0, Location::RequiresFpuRegister());
1266      locations->SetOut(Location::SameAsFirstInput());
1267      locations->AddTemp(Location::RequiresFpuRegister());
1268      break;
1269
1270    default:
1271      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1272  }
1273}
1274
1275void InstructionCodeGeneratorX86::VisitNeg(HNeg* neg) {
1276  LocationSummary* locations = neg->GetLocations();
1277  Location out = locations->Out();
1278  Location in = locations->InAt(0);
1279  switch (neg->GetResultType()) {
1280    case Primitive::kPrimInt:
1281      DCHECK(in.IsRegister());
1282      DCHECK(in.Equals(out));
1283      __ negl(out.AsRegister<Register>());
1284      break;
1285
1286    case Primitive::kPrimLong:
1287      DCHECK(in.IsRegisterPair());
1288      DCHECK(in.Equals(out));
1289      __ negl(out.AsRegisterPairLow<Register>());
1290      // Negation is similar to subtraction from zero.  The least
1291      // significant byte triggers a borrow when it is different from
1292      // zero; to take it into account, add 1 to the most significant
1293      // byte if the carry flag (CF) is set to 1 after the first NEGL
1294      // operation.
1295      __ adcl(out.AsRegisterPairHigh<Register>(), Immediate(0));
1296      __ negl(out.AsRegisterPairHigh<Register>());
1297      break;
1298
1299    case Primitive::kPrimFloat: {
1300      DCHECK(in.Equals(out));
1301      Register constant = locations->GetTemp(0).AsRegister<Register>();
1302      XmmRegister mask = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
1303      // Implement float negation with an exclusive or with value
1304      // 0x80000000 (mask for bit 31, representing the sign of a
1305      // single-precision floating-point number).
1306      __ movl(constant, Immediate(INT32_C(0x80000000)));
1307      __ movd(mask, constant);
1308      __ xorps(out.AsFpuRegister<XmmRegister>(), mask);
1309      break;
1310    }
1311
1312    case Primitive::kPrimDouble: {
1313      DCHECK(in.Equals(out));
1314      XmmRegister mask = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
1315      // Implement double negation with an exclusive or with value
1316      // 0x8000000000000000 (mask for bit 63, representing the sign of
1317      // a double-precision floating-point number).
1318      __ LoadLongConstant(mask, INT64_C(0x8000000000000000));
1319      __ xorpd(out.AsFpuRegister<XmmRegister>(), mask);
1320      break;
1321    }
1322
1323    default:
1324      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1325  }
1326}
1327
1328void LocationsBuilderX86::VisitTypeConversion(HTypeConversion* conversion) {
1329  LocationSummary* locations =
1330      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
1331  Primitive::Type result_type = conversion->GetResultType();
1332  Primitive::Type input_type = conversion->GetInputType();
1333  DCHECK_NE(result_type, input_type);
1334  switch (result_type) {
1335    case Primitive::kPrimByte:
1336      switch (input_type) {
1337        case Primitive::kPrimShort:
1338        case Primitive::kPrimInt:
1339        case Primitive::kPrimChar:
1340          // Processing a Dex `int-to-byte' instruction.
1341          locations->SetInAt(0, Location::Any());
1342          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1343          break;
1344
1345        default:
1346          LOG(FATAL) << "Unexpected type conversion from " << input_type
1347                     << " to " << result_type;
1348      }
1349      break;
1350
1351    case Primitive::kPrimShort:
1352      switch (input_type) {
1353        case Primitive::kPrimByte:
1354        case Primitive::kPrimInt:
1355        case Primitive::kPrimChar:
1356          // Processing a Dex `int-to-short' instruction.
1357          locations->SetInAt(0, Location::Any());
1358          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1359          break;
1360
1361        default:
1362          LOG(FATAL) << "Unexpected type conversion from " << input_type
1363                     << " to " << result_type;
1364      }
1365      break;
1366
1367    case Primitive::kPrimInt:
1368      switch (input_type) {
1369        case Primitive::kPrimLong:
1370          // Processing a Dex `long-to-int' instruction.
1371          locations->SetInAt(0, Location::Any());
1372          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1373          break;
1374
1375        case Primitive::kPrimFloat:
1376          // Processing a Dex `float-to-int' instruction.
1377          locations->SetInAt(0, Location::RequiresFpuRegister());
1378          locations->SetOut(Location::RequiresRegister());
1379          locations->AddTemp(Location::RequiresFpuRegister());
1380          break;
1381
1382        case Primitive::kPrimDouble:
1383          LOG(FATAL) << "Type conversion from " << input_type
1384                     << " to " << result_type << " not yet implemented";
1385          break;
1386
1387        default:
1388          LOG(FATAL) << "Unexpected type conversion from " << input_type
1389                     << " to " << result_type;
1390      }
1391      break;
1392
1393    case Primitive::kPrimLong:
1394      switch (input_type) {
1395        case Primitive::kPrimByte:
1396        case Primitive::kPrimShort:
1397        case Primitive::kPrimInt:
1398        case Primitive::kPrimChar:
1399          // Processing a Dex `int-to-long' instruction.
1400          locations->SetInAt(0, Location::RegisterLocation(EAX));
1401          locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
1402          break;
1403
1404        case Primitive::kPrimFloat:
1405        case Primitive::kPrimDouble:
1406          LOG(FATAL) << "Type conversion from " << input_type << " to "
1407                     << result_type << " not yet implemented";
1408          break;
1409
1410        default:
1411          LOG(FATAL) << "Unexpected type conversion from " << input_type
1412                     << " to " << result_type;
1413      }
1414      break;
1415
1416    case Primitive::kPrimChar:
1417      switch (input_type) {
1418        case Primitive::kPrimByte:
1419        case Primitive::kPrimShort:
1420        case Primitive::kPrimInt:
1421          // Processing a Dex `int-to-char' instruction.
1422          locations->SetInAt(0, Location::Any());
1423          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1424          break;
1425
1426        default:
1427          LOG(FATAL) << "Unexpected type conversion from " << input_type
1428                     << " to " << result_type;
1429      }
1430      break;
1431
1432    case Primitive::kPrimFloat:
1433      switch (input_type) {
1434        case Primitive::kPrimByte:
1435        case Primitive::kPrimShort:
1436        case Primitive::kPrimInt:
1437        case Primitive::kPrimChar:
1438          // Processing a Dex `int-to-float' instruction.
1439          locations->SetInAt(0, Location::RequiresRegister());
1440          locations->SetOut(Location::RequiresFpuRegister());
1441          break;
1442
1443        case Primitive::kPrimLong:
1444          // Processing a Dex `long-to-float' instruction.
1445          locations->SetInAt(0, Location::RequiresRegister());
1446          locations->SetOut(Location::RequiresFpuRegister());
1447          locations->AddTemp(Location::RequiresFpuRegister());
1448          locations->AddTemp(Location::RequiresFpuRegister());
1449          break;
1450
1451        case Primitive::kPrimDouble:
1452          LOG(FATAL) << "Type conversion from " << input_type
1453                     << " to " << result_type << " not yet implemented";
1454          break;
1455
1456        default:
1457          LOG(FATAL) << "Unexpected type conversion from " << input_type
1458                     << " to " << result_type;
1459      }
1460      break;
1461
1462    case Primitive::kPrimDouble:
1463      switch (input_type) {
1464        case Primitive::kPrimByte:
1465        case Primitive::kPrimShort:
1466        case Primitive::kPrimInt:
1467        case Primitive::kPrimChar:
1468          // Processing a Dex `int-to-double' instruction.
1469          locations->SetInAt(0, Location::RequiresRegister());
1470          locations->SetOut(Location::RequiresFpuRegister());
1471          break;
1472
1473        case Primitive::kPrimLong:
1474          // Processing a Dex `long-to-double' instruction.
1475          locations->SetInAt(0, Location::RequiresRegister());
1476          locations->SetOut(Location::RequiresFpuRegister());
1477          locations->AddTemp(Location::RequiresFpuRegister());
1478          locations->AddTemp(Location::RequiresFpuRegister());
1479          break;
1480
1481        case Primitive::kPrimFloat:
1482          LOG(FATAL) << "Type conversion from " << input_type
1483                     << " to " << result_type << " not yet implemented";
1484          break;
1485
1486        default:
1487          LOG(FATAL) << "Unexpected type conversion from " << input_type
1488                     << " to " << result_type;
1489      }
1490      break;
1491
1492    default:
1493      LOG(FATAL) << "Unexpected type conversion from " << input_type
1494                 << " to " << result_type;
1495  }
1496}
1497
1498void InstructionCodeGeneratorX86::VisitTypeConversion(HTypeConversion* conversion) {
1499  LocationSummary* locations = conversion->GetLocations();
1500  Location out = locations->Out();
1501  Location in = locations->InAt(0);
1502  Primitive::Type result_type = conversion->GetResultType();
1503  Primitive::Type input_type = conversion->GetInputType();
1504  DCHECK_NE(result_type, input_type);
1505  switch (result_type) {
1506    case Primitive::kPrimByte:
1507      switch (input_type) {
1508        case Primitive::kPrimShort:
1509        case Primitive::kPrimInt:
1510        case Primitive::kPrimChar:
1511          // Processing a Dex `int-to-byte' instruction.
1512          if (in.IsRegister()) {
1513            __ movsxb(out.AsRegister<Register>(), in.AsRegister<ByteRegister>());
1514          } else if (in.IsStackSlot()) {
1515            __ movsxb(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
1516          } else {
1517            DCHECK(in.GetConstant()->IsIntConstant());
1518            int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
1519            __ movl(out.AsRegister<Register>(), Immediate(static_cast<int8_t>(value)));
1520          }
1521          break;
1522
1523        default:
1524          LOG(FATAL) << "Unexpected type conversion from " << input_type
1525                     << " to " << result_type;
1526      }
1527      break;
1528
1529    case Primitive::kPrimShort:
1530      switch (input_type) {
1531        case Primitive::kPrimByte:
1532        case Primitive::kPrimInt:
1533        case Primitive::kPrimChar:
1534          // Processing a Dex `int-to-short' instruction.
1535          if (in.IsRegister()) {
1536            __ movsxw(out.AsRegister<Register>(), in.AsRegister<Register>());
1537          } else if (in.IsStackSlot()) {
1538            __ movsxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
1539          } else {
1540            DCHECK(in.GetConstant()->IsIntConstant());
1541            int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
1542            __ movl(out.AsRegister<Register>(), Immediate(static_cast<int16_t>(value)));
1543          }
1544          break;
1545
1546        default:
1547          LOG(FATAL) << "Unexpected type conversion from " << input_type
1548                     << " to " << result_type;
1549      }
1550      break;
1551
1552    case Primitive::kPrimInt:
1553      switch (input_type) {
1554        case Primitive::kPrimLong:
1555          // Processing a Dex `long-to-int' instruction.
1556          if (in.IsRegisterPair()) {
1557            __ movl(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
1558          } else if (in.IsDoubleStackSlot()) {
1559            __ movl(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
1560          } else {
1561            DCHECK(in.IsConstant());
1562            DCHECK(in.GetConstant()->IsLongConstant());
1563            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
1564            __ movl(out.AsRegister<Register>(), Immediate(static_cast<int32_t>(value)));
1565          }
1566          break;
1567
1568        case Primitive::kPrimFloat: {
1569          // Processing a Dex `float-to-int' instruction.
1570          XmmRegister input = in.AsFpuRegister<XmmRegister>();
1571          Register output = out.AsRegister<Register>();
1572          XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
1573          Label done, nan;
1574
1575          __ movl(output, Immediate(kPrimIntMax));
1576          // temp = int-to-float(output)
1577          __ cvtsi2ss(temp, output);
1578          // if input >= temp goto done
1579          __ comiss(input, temp);
1580          __ j(kAboveEqual, &done);
1581          // if input == NaN goto nan
1582          __ j(kUnordered, &nan);
1583          // output = float-to-int-truncate(input)
1584          __ cvttss2si(output, input);
1585          __ jmp(&done);
1586          __ Bind(&nan);
1587          //  output = 0
1588          __ xorl(output, output);
1589          __ Bind(&done);
1590          break;
1591        }
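        // Informal sketch of the sequence above (for reference, not emitted code):
        //   input = NaN    -> output = 0
        //   input >= 2^31  -> output = kPrimIntMax (the clamp path)
        //   input = -1e10f -> cvttss2si overflows to 0x80000000, i.e. kPrimIntMin,
        //                     which is the expected result for negative overflow.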
1592
1593        case Primitive::kPrimDouble:
1594          LOG(FATAL) << "Type conversion from " << input_type
1595                     << " to " << result_type << " not yet implemented";
1596          break;
1597
1598        default:
1599          LOG(FATAL) << "Unexpected type conversion from " << input_type
1600                     << " to " << result_type;
1601      }
1602      break;
1603
1604    case Primitive::kPrimLong:
1605      switch (input_type) {
1606        case Primitive::kPrimByte:
1607        case Primitive::kPrimShort:
1608        case Primitive::kPrimInt:
1609        case Primitive::kPrimChar:
1610          // Processing a Dex `int-to-long' instruction.
1611          DCHECK_EQ(out.AsRegisterPairLow<Register>(), EAX);
1612          DCHECK_EQ(out.AsRegisterPairHigh<Register>(), EDX);
1613          DCHECK_EQ(in.AsRegister<Register>(), EAX);
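          // cdq sign-extends EAX into EDX:EAX; e.g. an input of -5 (0xFFFFFFFB)
          // yields EDX = 0xFFFFFFFF and EAX = 0xFFFFFFFB.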
1614          __ cdq();
1615          break;
1616
1617        case Primitive::kPrimFloat:
1618        case Primitive::kPrimDouble:
1619          LOG(FATAL) << "Type conversion from " << input_type << " to "
1620                     << result_type << " not yet implemented";
1621          break;
1622
1623        default:
1624          LOG(FATAL) << "Unexpected type conversion from " << input_type
1625                     << " to " << result_type;
1626      }
1627      break;
1628
1629    case Primitive::kPrimChar:
1630      switch (input_type) {
1631        case Primitive::kPrimByte:
1632        case Primitive::kPrimShort:
1633        case Primitive::kPrimInt:
1634          // Processing a Dex `int-to-char' instruction.
1635          if (in.IsRegister()) {
1636            __ movzxw(out.AsRegister<Register>(), in.AsRegister<Register>());
1637          } else if (in.IsStackSlot()) {
1638            __ movzxw(out.AsRegister<Register>(), Address(ESP, in.GetStackIndex()));
1639          } else {
1640            DCHECK(in.GetConstant()->IsIntConstant());
1641            int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
1642            __ movl(out.AsRegister<Register>(), Immediate(static_cast<uint16_t>(value)));
1643          }
1644          break;
1645
1646        default:
1647          LOG(FATAL) << "Unexpected type conversion from " << input_type
1648                     << " to " << result_type;
1649      }
1650      break;
1651
1652    case Primitive::kPrimFloat:
1653      switch (input_type) {
1654        case Primitive::kPrimByte:
1655        case Primitive::kPrimShort:
1656        case Primitive::kPrimInt:
1657        case Primitive::kPrimChar:
1658          // Processing a Dex `int-to-float' instruction.
1659          __ cvtsi2ss(out.AsFpuRegister<XmmRegister>(), in.AsRegister<Register>());
1660          break;
1661
1662        case Primitive::kPrimLong: {
1663          // Processing a Dex `long-to-float' instruction.
1664          Register low = in.AsRegisterPairLow<Register>();
1665          Register high = in.AsRegisterPairHigh<Register>();
1666          XmmRegister result = out.AsFpuRegister<XmmRegister>();
1667          XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
1668          XmmRegister constant = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
1669
1670          // Operations use doubles for precision reasons (each 32-bit
1671          // half of a long fits in the 53-bit mantissa of a double,
1672          // but not in the 24-bit mantissa of a float).  This is
1673          // especially important for the low bits.  The result is
1674          // eventually converted to float.
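          //
          // Worked example (a sketch; it assumes k2Pow32EncodingForDouble and
          // k2Pow31EncodingForDouble are the IEEE-754 bit patterns of 2^32 and
          // 2^31 as doubles): for the long 2^32 + 3, high = 1 and low = 3.
          // After `low -= 2^31`, low reinterpreted as int32 is -2147483645;
          // converting it to double and adding back 2^31 recovers 3.0 exactly,
          // and adding high * 2^32 gives 4294967299.0, which is then narrowed
          // to float.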
1675
1676          // low = low - 2^31 (to prevent bit 31 of `low` from being
1677          // interpreted as a sign bit)
1678          __ subl(low, Immediate(0x80000000));
1679          // temp = int-to-double(high)
1680          __ cvtsi2sd(temp, high);
1681          // temp = temp * 2^32
1682          __ LoadLongConstant(constant, k2Pow32EncodingForDouble);
1683          __ mulsd(temp, constant);
1684          // result = int-to-double(low)
1685          __ cvtsi2sd(result, low);
1686          // result = result + 2^31 (restore the original value of `low`)
1687          __ LoadLongConstant(constant, k2Pow31EncodingForDouble);
1688          __ addsd(result, constant);
1689          // result = result + temp
1690          __ addsd(result, temp);
1691          // result = double-to-float(result)
1692          __ cvtsd2ss(result, result);
1693          break;
1694        }
1695
1696        case Primitive::kPrimDouble:
1697          LOG(FATAL) << "Type conversion from " << input_type
1698                     << " to " << result_type << " not yet implemented";
1699          break;
1700
1701        default:
1702          LOG(FATAL) << "Unexpected type conversion from " << input_type
1703                     << " to " << result_type;
1704      }
1705      break;
1706
1707    case Primitive::kPrimDouble:
1708      switch (input_type) {
1709        case Primitive::kPrimByte:
1710        case Primitive::kPrimShort:
1711        case Primitive::kPrimInt:
1712        case Primitive::kPrimChar:
1713          // Processing a Dex `int-to-double' instruction.
1714          __ cvtsi2sd(out.AsFpuRegister<XmmRegister>(), in.AsRegister<Register>());
1715          break;
1716
1717        case Primitive::kPrimLong: {
1718          // Processing a Dex `long-to-double' instruction.
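          // Same decomposition as the `long-to-float' path above, except the
          // result stays in double precision (there is no final cvtsd2ss).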
1719          Register low = in.AsRegisterPairLow<Register>();
1720          Register high = in.AsRegisterPairHigh<Register>();
1721          XmmRegister result = out.AsFpuRegister<XmmRegister>();
1722          XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
1723          XmmRegister constant = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
1724
1725          // low = low - 2^31 (to prevent bit 31 of `low` from being
1726          // interpreted as a sign bit)
1727          __ subl(low, Immediate(0x80000000));
1728          // temp = int-to-double(high)
1729          __ cvtsi2sd(temp, high);
1730          // temp = temp * 2^32
1731          __ LoadLongConstant(constant, k2Pow32EncodingForDouble);
1732          __ mulsd(temp, constant);
1733          // result = int-to-double(low)
1734          __ cvtsi2sd(result, low);
1735          // result = result + 2^31 (restore the original value of `low`)
1736          __ LoadLongConstant(constant, k2Pow31EncodingForDouble);
1737          __ addsd(result, constant);
1738          // result = result + temp
1739          __ addsd(result, temp);
1740          break;
1741        }
1742
1743        case Primitive::kPrimFloat:
1744          LOG(FATAL) << "Type conversion from " << input_type
1745                     << " to " << result_type << " not yet implemented";
1746          break;
1747
1748        default:
1749          LOG(FATAL) << "Unexpected type conversion from " << input_type
1750                     << " to " << result_type;
1751      }
1752      break;
1753
1754    default:
1755      LOG(FATAL) << "Unexpected type conversion from " << input_type
1756                 << " to " << result_type;
1757  }
1758}
1759
1760void LocationsBuilderX86::VisitAdd(HAdd* add) {
1761  LocationSummary* locations =
1762      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1763  switch (add->GetResultType()) {
1764    case Primitive::kPrimInt:
1765    case Primitive::kPrimLong: {
1766      locations->SetInAt(0, Location::RequiresRegister());
1767      locations->SetInAt(1, Location::Any());
1768      locations->SetOut(Location::SameAsFirstInput());
1769      break;
1770    }
1771
1772    case Primitive::kPrimFloat:
1773    case Primitive::kPrimDouble: {
1774      locations->SetInAt(0, Location::RequiresFpuRegister());
1775      locations->SetInAt(1, Location::Any());
1776      locations->SetOut(Location::SameAsFirstInput());
1777      break;
1778    }
1779
1780    default:
1781      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1782      break;
1783  }
1784}
1785
1786void InstructionCodeGeneratorX86::VisitAdd(HAdd* add) {
1787  LocationSummary* locations = add->GetLocations();
1788  Location first = locations->InAt(0);
1789  Location second = locations->InAt(1);
1790  DCHECK(first.Equals(locations->Out()));
1791  switch (add->GetResultType()) {
1792    case Primitive::kPrimInt: {
1793      if (second.IsRegister()) {
1794        __ addl(first.AsRegister<Register>(), second.AsRegister<Register>());
1795      } else if (second.IsConstant()) {
1796        __ addl(first.AsRegister<Register>(),
1797                Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
1798      } else {
1799        __ addl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
1800      }
1801      break;
1802    }
1803
1804    case Primitive::kPrimLong: {
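      // The 64-bit add is an addl on the low halves (which sets CF) followed
      // by an adcl on the high halves, which folds the carry back in.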
1805      if (second.IsRegisterPair()) {
1806        __ addl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
1807        __ adcl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
1808      } else {
1809        __ addl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
1810        __ adcl(first.AsRegisterPairHigh<Register>(),
1811                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
1812      }
1813      break;
1814    }
1815
1816    case Primitive::kPrimFloat: {
1817      if (second.IsFpuRegister()) {
1818        __ addss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
1819      } else {
1820        __ addss(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
1821      }
1822      break;
1823    }
1824
1825    case Primitive::kPrimDouble: {
1826      if (second.IsFpuRegister()) {
1827        __ addsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
1828      } else {
1829        __ addsd(first.AsFpuRegister<XmmRegister>(), Address(ESP, second.GetStackIndex()));
1830      }
1831      break;
1832    }
1833
1834    default:
1835      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1836  }
1837}
1838
1839void LocationsBuilderX86::VisitSub(HSub* sub) {
1840  LocationSummary* locations =
1841      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1842  switch (sub->GetResultType()) {
1843    case Primitive::kPrimInt:
1844    case Primitive::kPrimLong: {
1845      locations->SetInAt(0, Location::RequiresRegister());
1846      locations->SetInAt(1, Location::Any());
1847      locations->SetOut(Location::SameAsFirstInput());
1848      break;
1849    }
1850    case Primitive::kPrimFloat:
1851    case Primitive::kPrimDouble: {
1852      locations->SetInAt(0, Location::RequiresFpuRegister());
1853      locations->SetInAt(1, Location::RequiresFpuRegister());
1854      locations->SetOut(Location::SameAsFirstInput());
1855      break;
1856    }
1857
1858    default:
1859      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1860  }
1861}
1862
1863void InstructionCodeGeneratorX86::VisitSub(HSub* sub) {
1864  LocationSummary* locations = sub->GetLocations();
1865  Location first = locations->InAt(0);
1866  Location second = locations->InAt(1);
1867  DCHECK(first.Equals(locations->Out()));
1868  switch (sub->GetResultType()) {
1869    case Primitive::kPrimInt: {
1870      if (second.IsRegister()) {
1871        __ subl(first.AsRegister<Register>(), second.AsRegister<Register>());
1872      } else if (second.IsConstant()) {
1873        __ subl(first.AsRegister<Register>(),
1874                Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
1875      } else {
1876        __ subl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
1877      }
1878      break;
1879    }
1880
1881    case Primitive::kPrimLong: {
1882      if (second.IsRegisterPair()) {
1883        __ subl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
1884        __ sbbl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
1885      } else {
1886        __ subl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
1887        __ sbbl(first.AsRegisterPairHigh<Register>(),
1888                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
1889      }
1890      break;
1891    }
1892
1893    case Primitive::kPrimFloat: {
1894      __ subss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
1895      break;
1896    }
1897
1898    case Primitive::kPrimDouble: {
1899      __ subsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
1900      break;
1901    }
1902
1903    default:
1904      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1905  }
1906}
1907
1908void LocationsBuilderX86::VisitMul(HMul* mul) {
1909  LocationSummary* locations =
1910      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1911  switch (mul->GetResultType()) {
1912    case Primitive::kPrimInt:
1913      locations->SetInAt(0, Location::RequiresRegister());
1914      locations->SetInAt(1, Location::Any());
1915      locations->SetOut(Location::SameAsFirstInput());
1916      break;
1917    case Primitive::kPrimLong: {
1918      locations->SetInAt(0, Location::RequiresRegister());
1919      // TODO: Currently this handles only stack operands:
1920      // - we don't have enough registers because we currently use the Quick ABI.
1921      // - by the time we have a working register allocator we will probably change the ABI
1922      //   and fix the above.
1923      // - we don't have a way yet to request operands on the stack, but the baseline compiler
1924      //   will leave the operands on the stack with Any().
1925      locations->SetInAt(1, Location::Any());
1926      locations->SetOut(Location::SameAsFirstInput());
1927      // Needed for the 32-bit multiply that produces a 64-bit result in EDX:EAX.
1928      locations->AddTemp(Location::RegisterLocation(EAX));
1929      locations->AddTemp(Location::RegisterLocation(EDX));
1930      break;
1931    }
1932    case Primitive::kPrimFloat:
1933    case Primitive::kPrimDouble: {
1934      locations->SetInAt(0, Location::RequiresFpuRegister());
1935      locations->SetInAt(1, Location::RequiresFpuRegister());
1936      locations->SetOut(Location::SameAsFirstInput());
1937      break;
1938    }
1939
1940    default:
1941      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
1942  }
1943}
1944
1945void InstructionCodeGeneratorX86::VisitMul(HMul* mul) {
1946  LocationSummary* locations = mul->GetLocations();
1947  Location first = locations->InAt(0);
1948  Location second = locations->InAt(1);
1949  DCHECK(first.Equals(locations->Out()));
1950
1951  switch (mul->GetResultType()) {
1952    case Primitive::kPrimInt: {
1953      if (second.IsRegister()) {
1954        __ imull(first.AsRegister<Register>(), second.AsRegister<Register>());
1955      } else if (second.IsConstant()) {
1956        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
1957        __ imull(first.AsRegister<Register>(), imm);
1958      } else {
1959        DCHECK(second.IsStackSlot());
1960        __ imull(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
1961      }
1962      break;
1963    }
1964
1965    case Primitive::kPrimLong: {
1966      DCHECK(second.IsDoubleStackSlot());
1967
1968      Register in1_hi = first.AsRegisterPairHigh<Register>();
1969      Register in1_lo = first.AsRegisterPairLow<Register>();
1970      Address in2_hi(ESP, second.GetHighStackIndex(kX86WordSize));
1971      Address in2_lo(ESP, second.GetStackIndex());
1972      Register eax = locations->GetTemp(0).AsRegister<Register>();
1973      Register edx = locations->GetTemp(1).AsRegister<Register>();
1974
1975      DCHECK_EQ(EAX, eax);
1976      DCHECK_EQ(EDX, edx);
1977
1978      // input: in1 - 64 bits, in2 - 64 bits
1979      // output: in1
1980      // formula: in1.hi : in1.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
1981      // parts: in1.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
1982      // parts: in1.lo = (in1.lo * in2.lo)[31:0]
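      //
      // Roughly, in plain C++ (an illustrative sketch, not code used here):
      //   uint64_t Mul64(uint64_t a, uint64_t b) {
      //     uint32_t a_lo = a, a_hi = a >> 32, b_lo = b, b_hi = b >> 32;
      //     uint64_t lo_prod = static_cast<uint64_t>(a_lo) * b_lo;
      //     uint32_t hi = a_lo * b_hi + a_hi * b_lo + static_cast<uint32_t>(lo_prod >> 32);
      //     return (static_cast<uint64_t>(hi) << 32) | static_cast<uint32_t>(lo_prod);
      //   }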
1983
1984      __ movl(eax, in2_hi);
1985      // eax <- in1.lo * in2.hi
1986      __ imull(eax, in1_lo);
1987      // in1.hi <- in1.hi * in2.lo
1988      __ imull(in1_hi, in2_lo);
1989      // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
1990      __ addl(in1_hi, eax);
1991      // move in1_lo to eax to prepare for double precision
1992      __ movl(eax, in1_lo);
1993      // edx:eax <- in1.lo * in2.lo
1994      __ mull(in2_lo);
1995      // in1.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
1996      __ addl(in1_hi, edx);
1997      // in1.lo <- (in1.lo * in2.lo)[31:0];
1998      __ movl(in1_lo, eax);
1999
2000      break;
2001    }
2002
2003    case Primitive::kPrimFloat: {
2004      __ mulss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
2005      break;
2006    }
2007
2008    case Primitive::kPrimDouble: {
2009      __ mulsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
2010      break;
2011    }
2012
2013    default:
2014      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
2015  }
2016}
2017
2018void InstructionCodeGeneratorX86::GenerateDivRemIntegral(HBinaryOperation* instruction) {
2019  DCHECK(instruction->IsDiv() || instruction->IsRem());
2020
2021  LocationSummary* locations = instruction->GetLocations();
2022  Location out = locations->Out();
2023  Location first = locations->InAt(0);
2024  Location second = locations->InAt(1);
2025  bool is_div = instruction->IsDiv();
2026
2027  switch (instruction->GetResultType()) {
2028    case Primitive::kPrimInt: {
2029      Register second_reg = second.AsRegister<Register>();
2030      DCHECK_EQ(EAX, first.AsRegister<Register>());
2031      DCHECK_EQ(is_div ? EAX : EDX, out.AsRegister<Register>());
2032
2033      SlowPathCodeX86* slow_path =
2034          new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86(out.AsRegister<Register>(),
2035                                                                 is_div);
2036      codegen_->AddSlowPath(slow_path);
2037
2038      // 0x80000000/-1 triggers an arithmetic exception!
2039      // Dividing by -1 is actually negation, and -0x80000000 = 0x80000000, so
2040      // it's safe to just use negl instead of more complex comparisons.
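      // Concretely, any division by -1 jumps to DivRemMinusOneSlowPathX86
      // (defined earlier in this file) instead of executing idivl. This keeps
      // kPrimIntMin / -1 == kPrimIntMin and kPrimIntMin % -1 == 0, the Java
      // semantics, and avoids the #DE fault idivl raises for that operand pair.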
2041
2042      __ cmpl(second_reg, Immediate(-1));
2043      __ j(kEqual, slow_path->GetEntryLabel());
2044
2045      // edx:eax <- sign-extended of eax
2046      __ cdq();
2047      // eax = quotient, edx = remainder
2048      __ idivl(second_reg);
2049
2050      __ Bind(slow_path->GetExitLabel());
2051      break;
2052    }
2053
2054    case Primitive::kPrimLong: {
2055      InvokeRuntimeCallingConvention calling_convention;
2056      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
2057      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
2058      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
2059      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
2060      DCHECK_EQ(EAX, out.AsRegisterPairLow<Register>());
2061      DCHECK_EQ(EDX, out.AsRegisterPairHigh<Register>());
2062
2063      if (is_div) {
2064        __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pLdiv)));
2065      } else {
2066        __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pLmod)));
2067      }
2068      uint32_t dex_pc = is_div
2069          ? instruction->AsDiv()->GetDexPc()
2070          : instruction->AsRem()->GetDexPc();
2071      codegen_->RecordPcInfo(instruction, dex_pc);
2072
2073      break;
2074    }
2075
2076    default:
2077      LOG(FATAL) << "Unexpected type for GenerateDivRemIntegral " << instruction->GetResultType();
2078  }
2079}
2080
2081void LocationsBuilderX86::VisitDiv(HDiv* div) {
2082  LocationSummary::CallKind call_kind = div->GetResultType() == Primitive::kPrimLong
2083      ? LocationSummary::kCall
2084      : LocationSummary::kNoCall;
2085  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
2086
2087  switch (div->GetResultType()) {
2088    case Primitive::kPrimInt: {
2089      locations->SetInAt(0, Location::RegisterLocation(EAX));
2090      locations->SetInAt(1, Location::RequiresRegister());
2091      locations->SetOut(Location::SameAsFirstInput());
2092      // Intel uses edx:eax as the dividend.
2093      locations->AddTemp(Location::RegisterLocation(EDX));
2094      break;
2095    }
2096    case Primitive::kPrimLong: {
2097      InvokeRuntimeCallingConvention calling_convention;
2098      locations->SetInAt(0, Location::RegisterPairLocation(
2099          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2100      locations->SetInAt(1, Location::RegisterPairLocation(
2101          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2102      // Runtime helper puts the result in EAX, EDX.
2103      locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
2104      break;
2105    }
2106    case Primitive::kPrimFloat:
2107    case Primitive::kPrimDouble: {
2108      locations->SetInAt(0, Location::RequiresFpuRegister());
2109      locations->SetInAt(1, Location::RequiresFpuRegister());
2110      locations->SetOut(Location::SameAsFirstInput());
2111      break;
2112    }
2113
2114    default:
2115      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2116  }
2117}
2118
2119void InstructionCodeGeneratorX86::VisitDiv(HDiv* div) {
2120  LocationSummary* locations = div->GetLocations();
2121  Location out = locations->Out();
2122  Location first = locations->InAt(0);
2123  Location second = locations->InAt(1);
2124
2125  switch (div->GetResultType()) {
2126    case Primitive::kPrimInt:
2127    case Primitive::kPrimLong: {
2128      GenerateDivRemIntegral(div);
2129      break;
2130    }
2131
2132    case Primitive::kPrimFloat: {
2133      DCHECK(first.Equals(out));
2134      __ divss(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
2135      break;
2136    }
2137
2138    case Primitive::kPrimDouble: {
2139      DCHECK(first.Equals(out));
2140      __ divsd(first.AsFpuRegister<XmmRegister>(), second.AsFpuRegister<XmmRegister>());
2141      break;
2142    }
2143
2144    default:
2145      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2146  }
2147}
2148
2149void LocationsBuilderX86::VisitRem(HRem* rem) {
2150  LocationSummary::CallKind call_kind = rem->GetResultType() == Primitive::kPrimLong
2151      ? LocationSummary::kCall
2152      : LocationSummary::kNoCall;
2153  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2154
2155  switch (rem->GetResultType()) {
2156    case Primitive::kPrimInt: {
2157      locations->SetInAt(0, Location::RegisterLocation(EAX));
2158      locations->SetInAt(1, Location::RequiresRegister());
2159      locations->SetOut(Location::RegisterLocation(EDX));
2160      break;
2161    }
2162    case Primitive::kPrimLong: {
2163      InvokeRuntimeCallingConvention calling_convention;
2164      locations->SetInAt(0, Location::RegisterPairLocation(
2165          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2166      locations->SetInAt(1, Location::RegisterPairLocation(
2167          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2168      // Runtime helper puts the result in EAX, EDX.
2169      locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
2170      break;
2171    }
2172    case Primitive::kPrimFloat:
2173    case Primitive::kPrimDouble: {
2174      LOG(FATAL) << "Unimplemented rem type " << rem->GetResultType();
2175      break;
2176    }
2177
2178    default:
2179      LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
2180  }
2181}
2182
2183void InstructionCodeGeneratorX86::VisitRem(HRem* rem) {
2184  Primitive::Type type = rem->GetResultType();
2185  switch (type) {
2186    case Primitive::kPrimInt:
2187    case Primitive::kPrimLong: {
2188      GenerateDivRemIntegral(rem);
2189      break;
2190    }
2191    case Primitive::kPrimFloat:
2192    case Primitive::kPrimDouble: {
2193      LOG(FATAL) << "Unimplemented rem type " << type;
2194      break;
2195    }
2196    default:
2197      LOG(FATAL) << "Unexpected rem type " << type;
2198  }
2199}
2200
2201void LocationsBuilderX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2202  LocationSummary* locations =
2203      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2204  switch (instruction->GetType()) {
2205    case Primitive::kPrimInt: {
2206      locations->SetInAt(0, Location::Any());
2207      break;
2208    }
2209    case Primitive::kPrimLong: {
2210      locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2211      if (!instruction->IsConstant()) {
2212        locations->AddTemp(Location::RequiresRegister());
2213      }
2214      break;
2215    }
2216    default:
2217      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
2218  }
2219  if (instruction->HasUses()) {
2220    locations->SetOut(Location::SameAsFirstInput());
2221  }
2222}
2223
2224void InstructionCodeGeneratorX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2225  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86(instruction);
2226  codegen_->AddSlowPath(slow_path);
2227
2228  LocationSummary* locations = instruction->GetLocations();
2229  Location value = locations->InAt(0);
2230
2231  switch (instruction->GetType()) {
2232    case Primitive::kPrimInt: {
2233      if (value.IsRegister()) {
2234        __ testl(value.AsRegister<Register>(), value.AsRegister<Register>());
2235        __ j(kEqual, slow_path->GetEntryLabel());
2236      } else if (value.IsStackSlot()) {
2237        __ cmpl(Address(ESP, value.GetStackIndex()), Immediate(0));
2238        __ j(kEqual, slow_path->GetEntryLabel());
2239      } else {
2240        DCHECK(value.IsConstant()) << value;
2241        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
2242          __ jmp(slow_path->GetEntryLabel());
2243        }
2244      }
2245      break;
2246    }
2247    case Primitive::kPrimLong: {
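      // A 64-bit value is zero iff (low | high) == 0, so a single orl into a
      // temporary both computes that test and sets the flags for the jump.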
2248      if (value.IsRegisterPair()) {
2249        Register temp = locations->GetTemp(0).AsRegister<Register>();
2250        __ movl(temp, value.AsRegisterPairLow<Register>());
2251        __ orl(temp, value.AsRegisterPairHigh<Register>());
2252        __ j(kEqual, slow_path->GetEntryLabel());
2253      } else {
2254        DCHECK(value.IsConstant()) << value;
2255        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
2256          __ jmp(slow_path->GetEntryLabel());
2257        }
2258      }
2259      break;
2260    }
2261    default:
2262      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
2263  }
2264}
2265
2266void LocationsBuilderX86::HandleShift(HBinaryOperation* op) {
2267  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2268
2269  LocationSummary* locations =
2270      new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
2271
2272  switch (op->GetResultType()) {
2273    case Primitive::kPrimInt: {
2274      locations->SetInAt(0, Location::RequiresRegister());
2275      // The shift count needs to be in CL.
2276      locations->SetInAt(1, Location::ByteRegisterOrConstant(ECX, op->InputAt(1)));
2277      locations->SetOut(Location::SameAsFirstInput());
2278      break;
2279    }
2280    case Primitive::kPrimLong: {
2281      locations->SetInAt(0, Location::RequiresRegister());
2282      // The shift count needs to be in CL.
2283      locations->SetInAt(1, Location::RegisterLocation(ECX));
2284      locations->SetOut(Location::SameAsFirstInput());
2285      break;
2286    }
2287    default:
2288      LOG(FATAL) << "Unexpected op type " << op->GetResultType();
2289  }
2290}
2291
2292void InstructionCodeGeneratorX86::HandleShift(HBinaryOperation* op) {
2293  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2294
2295  LocationSummary* locations = op->GetLocations();
2296  Location first = locations->InAt(0);
2297  Location second = locations->InAt(1);
2298  DCHECK(first.Equals(locations->Out()));
2299
2300  switch (op->GetResultType()) {
2301    case Primitive::kPrimInt: {
2302      Register first_reg = first.AsRegister<Register>();
2303      if (second.IsRegister()) {
2304        Register second_reg = second.AsRegister<Register>();
2305        DCHECK_EQ(ECX, second_reg);
2306        if (op->IsShl()) {
2307          __ shll(first_reg, second_reg);
2308        } else if (op->IsShr()) {
2309          __ sarl(first_reg, second_reg);
2310        } else {
2311          __ shrl(first_reg, second_reg);
2312        }
2313      } else {
2314        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
2315        if (op->IsShl()) {
2316          __ shll(first_reg, imm);
2317        } else if (op->IsShr()) {
2318          __ sarl(first_reg, imm);
2319        } else {
2320          __ shrl(first_reg, imm);
2321        }
2322      }
2323      break;
2324    }
2325    case Primitive::kPrimLong: {
2326      Register second_reg = second.AsRegister<Register>();
2327      DCHECK_EQ(ECX, second_reg);
2328      if (op->IsShl()) {
2329        GenerateShlLong(first, second_reg);
2330      } else if (op->IsShr()) {
2331        GenerateShrLong(first, second_reg);
2332      } else {
2333        GenerateUShrLong(first, second_reg);
2334      }
2335      break;
2336    }
2337    default:
2338      LOG(FATAL) << "Unexpected op type " << op->GetResultType();
2339  }
2340}
2341
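// The three helpers below rely on x86 masking 32-bit shift counts in CL to the
// low five bits. For a count c in [32, 63] the shld/shrd pair and the 32-bit
// shift therefore operate with c - 32, and the `testl(shifter, Immediate(32))`
// fix-up moves the partially shifted half into place and fills the other half
// with zero (or with the sign for the arithmetic right shift). For example, a
// left shift by 40 first computes low << 8, then copies it into the high half
// and zeroes the low half.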
2342void InstructionCodeGeneratorX86::GenerateShlLong(const Location& loc, Register shifter) {
2343  Label done;
2344  __ shld(loc.AsRegisterPairHigh<Register>(), loc.AsRegisterPairLow<Register>(), shifter);
2345  __ shll(loc.AsRegisterPairLow<Register>(), shifter);
2346  __ testl(shifter, Immediate(32));
2347  __ j(kEqual, &done);
2348  __ movl(loc.AsRegisterPairHigh<Register>(), loc.AsRegisterPairLow<Register>());
2349  __ movl(loc.AsRegisterPairLow<Register>(), Immediate(0));
2350  __ Bind(&done);
2351}
2352
2353void InstructionCodeGeneratorX86::GenerateShrLong(const Location& loc, Register shifter) {
2354  Label done;
2355  __ shrd(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>(), shifter);
2356  __ sarl(loc.AsRegisterPairHigh<Register>(), shifter);
2357  __ testl(shifter, Immediate(32));
2358  __ j(kEqual, &done);
2359  __ movl(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>());
2360  __ sarl(loc.AsRegisterPairHigh<Register>(), Immediate(31));
2361  __ Bind(&done);
2362}
2363
2364void InstructionCodeGeneratorX86::GenerateUShrLong(const Location& loc, Register shifter) {
2365  Label done;
2366  __ shrd(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>(), shifter);
2367  __ shrl(loc.AsRegisterPairHigh<Register>(), shifter);
2368  __ testl(shifter, Immediate(32));
2369  __ j(kEqual, &done);
2370  __ movl(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>());
2371  __ movl(loc.AsRegisterPairHigh<Register>(), Immediate(0));
2372  __ Bind(&done);
2373}
2374
2375void LocationsBuilderX86::VisitShl(HShl* shl) {
2376  HandleShift(shl);
2377}
2378
2379void InstructionCodeGeneratorX86::VisitShl(HShl* shl) {
2380  HandleShift(shl);
2381}
2382
2383void LocationsBuilderX86::VisitShr(HShr* shr) {
2384  HandleShift(shr);
2385}
2386
2387void InstructionCodeGeneratorX86::VisitShr(HShr* shr) {
2388  HandleShift(shr);
2389}
2390
2391void LocationsBuilderX86::VisitUShr(HUShr* ushr) {
2392  HandleShift(ushr);
2393}
2394
2395void InstructionCodeGeneratorX86::VisitUShr(HUShr* ushr) {
2396  HandleShift(ushr);
2397}
2398
2399void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
2400  LocationSummary* locations =
2401      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2402  locations->SetOut(Location::RegisterLocation(EAX));
2403  InvokeRuntimeCallingConvention calling_convention;
2404  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2405  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2406}
2407
2408void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
2409  InvokeRuntimeCallingConvention calling_convention;
2410  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
2411  __ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));
2412
2413  __ fs()->call(
2414      Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocObjectWithAccessCheck)));
2415
2416  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
2417  DCHECK(!codegen_->IsLeafMethod());
2418}
2419
2420void LocationsBuilderX86::VisitNewArray(HNewArray* instruction) {
2421  LocationSummary* locations =
2422      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2423  locations->SetOut(Location::RegisterLocation(EAX));
2424  InvokeRuntimeCallingConvention calling_convention;
2425  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2426  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2427  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2428}
2429
2430void InstructionCodeGeneratorX86::VisitNewArray(HNewArray* instruction) {
2431  InvokeRuntimeCallingConvention calling_convention;
2432  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
2433  __ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));
2434
2435  __ fs()->call(
2436      Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocArrayWithAccessCheck)));
2437
2438  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
2439  DCHECK(!codegen_->IsLeafMethod());
2440}
2441
2442void LocationsBuilderX86::VisitParameterValue(HParameterValue* instruction) {
2443  LocationSummary* locations =
2444      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2445  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2446  if (location.IsStackSlot()) {
2447    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2448  } else if (location.IsDoubleStackSlot()) {
2449    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2450  }
2451  locations->SetOut(location);
2452}
2453
2454void InstructionCodeGeneratorX86::VisitParameterValue(HParameterValue* instruction) {
2455  UNUSED(instruction);
2456}
2457
2458void LocationsBuilderX86::VisitNot(HNot* not_) {
2459  LocationSummary* locations =
2460      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
2461  locations->SetInAt(0, Location::RequiresRegister());
2462  locations->SetOut(Location::SameAsFirstInput());
2463}
2464
2465void InstructionCodeGeneratorX86::VisitNot(HNot* not_) {
2466  LocationSummary* locations = not_->GetLocations();
2467  Location in = locations->InAt(0);
2468  Location out = locations->Out();
2469  DCHECK(in.Equals(out));
2470  switch (not_->InputAt(0)->GetType()) {
2471    case Primitive::kPrimBoolean:
2472      __ xorl(out.AsRegister<Register>(), Immediate(1));
2473      break;
2474
2475    case Primitive::kPrimInt:
2476      __ notl(out.AsRegister<Register>());
2477      break;
2478
2479    case Primitive::kPrimLong:
2480      __ notl(out.AsRegisterPairLow<Register>());
2481      __ notl(out.AsRegisterPairHigh<Register>());
2482      break;
2483
2484    default:
2485      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
2486  }
2487}
2488
2489void LocationsBuilderX86::VisitCompare(HCompare* compare) {
2490  LocationSummary* locations =
2491      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2492  switch (compare->InputAt(0)->GetType()) {
2493    case Primitive::kPrimLong: {
2494      locations->SetInAt(0, Location::RequiresRegister());
2495      // TODO: We set Any() here but we do not handle constants yet.
2496      locations->SetInAt(1, Location::Any());
2497      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2498      break;
2499    }
2500    case Primitive::kPrimFloat:
2501    case Primitive::kPrimDouble: {
2502      locations->SetInAt(0, Location::RequiresFpuRegister());
2503      locations->SetInAt(1, Location::RequiresFpuRegister());
2504      locations->SetOut(Location::RequiresRegister());
2505      break;
2506    }
2507    default:
2508      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
2509  }
2510}
2511
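// Compare produces -1, 0 or 1 in the output register. For floating point
// inputs an unordered comparison (at least one NaN) goes to the `greater'
// label when the compare has gt bias (the Dex cmpg-float/cmpg-double variants)
// and to `less' otherwise; that is what the kUnordered branches below encode.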
2512void InstructionCodeGeneratorX86::VisitCompare(HCompare* compare) {
2513  LocationSummary* locations = compare->GetLocations();
2514  Register out = locations->Out().AsRegister<Register>();
2515  Location left = locations->InAt(0);
2516  Location right = locations->InAt(1);
2517
2518  Label less, greater, done;
2519  switch (compare->InputAt(0)->GetType()) {
2520    case Primitive::kPrimLong: {
2521      if (right.IsRegisterPair()) {
2522        __ cmpl(left.AsRegisterPairHigh<Register>(), right.AsRegisterPairHigh<Register>());
2523      } else {
2524        DCHECK(right.IsDoubleStackSlot());
2525        __ cmpl(left.AsRegisterPairHigh<Register>(),
2526                Address(ESP, right.GetHighStackIndex(kX86WordSize)));
2527      }
2528      __ j(kLess, &less);  // Signed compare.
2529      __ j(kGreater, &greater);  // Signed compare.
2530      if (right.IsRegisterPair()) {
2531        __ cmpl(left.AsRegisterPairLow<Register>(), right.AsRegisterPairLow<Register>());
2532      } else {
2533        DCHECK(right.IsDoubleStackSlot());
2534        __ cmpl(left.AsRegisterPairLow<Register>(), Address(ESP, right.GetStackIndex()));
2535      }
2536      break;
2537    }
2538    case Primitive::kPrimFloat: {
2539      __ ucomiss(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
2540      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
2541      break;
2542    }
2543    case Primitive::kPrimDouble: {
2544      __ ucomisd(left.AsFpuRegister<XmmRegister>(), right.AsFpuRegister<XmmRegister>());
2545      __ j(kUnordered, compare->IsGtBias() ? &greater : &less);
2546      break;
2547    }
2548    default:
2549      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
2550  }
2551  __ movl(out, Immediate(0));
2552  __ j(kEqual, &done);
2553  __ j(kBelow, &less);  // kBelow is for CF (unsigned & floats).
2554
2555  __ Bind(&greater);
2556  __ movl(out, Immediate(1));
2557  __ jmp(&done);
2558
2559  __ Bind(&less);
2560  __ movl(out, Immediate(-1));
2561
2562  __ Bind(&done);
2563}
2564
2565void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
2566  LocationSummary* locations =
2567      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2568  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2569    locations->SetInAt(i, Location::Any());
2570  }
2571  locations->SetOut(Location::Any());
2572}
2573
2574void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction) {
2575  UNUSED(instruction);
2576  LOG(FATAL) << "Unreachable";
2577}
2578
2579void LocationsBuilderX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
2580  LocationSummary* locations =
2581      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2582  locations->SetInAt(0, Location::RequiresRegister());
2583  Primitive::Type field_type = instruction->GetFieldType();
2584  bool needs_write_barrier =
2585    CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
2586
2587  bool is_byte_type = (field_type == Primitive::kPrimBoolean)
2588      || (field_type == Primitive::kPrimByte);
2589  // The register allocator does not support multiple
2590  // inputs that die at entry with one in a specific register.
2591  if (is_byte_type) {
2592    // Ensure the value is in a byte register.
2593    locations->SetInAt(1, Location::RegisterLocation(EAX));
2594  } else {
2595    locations->SetInAt(1, Location::RequiresRegister());
2596  }
2597  // Temporary registers for the write barrier.
2598  if (needs_write_barrier) {
2599    locations->AddTemp(Location::RequiresRegister());
2600    // Ensure the card is in a byte register.
2601    locations->AddTemp(Location::RegisterLocation(ECX));
2602  }
2603}
2604
2605void InstructionCodeGeneratorX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
2606  LocationSummary* locations = instruction->GetLocations();
2607  Register obj = locations->InAt(0).AsRegister<Register>();
2608  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
2609  Primitive::Type field_type = instruction->GetFieldType();
2610
2611  switch (field_type) {
2612    case Primitive::kPrimBoolean:
2613    case Primitive::kPrimByte: {
2614      ByteRegister value = locations->InAt(1).AsRegister<ByteRegister>();
2615      __ movb(Address(obj, offset), value);
2616      break;
2617    }
2618
2619    case Primitive::kPrimShort:
2620    case Primitive::kPrimChar: {
2621      Register value = locations->InAt(1).AsRegister<Register>();
2622      __ movw(Address(obj, offset), value);
2623      break;
2624    }
2625
2626    case Primitive::kPrimInt:
2627    case Primitive::kPrimNot: {
2628      Register value = locations->InAt(1).AsRegister<Register>();
2629      __ movl(Address(obj, offset), value);
2630
2631      if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
2632        Register temp = locations->GetTemp(0).AsRegister<Register>();
2633        Register card = locations->GetTemp(1).AsRegister<Register>();
2634        codegen_->MarkGCCard(temp, card, obj, value);
2635      }
2636      break;
2637    }
2638
2639    case Primitive::kPrimLong: {
2640      Location value = locations->InAt(1);
2641      __ movl(Address(obj, offset), value.AsRegisterPairLow<Register>());
2642      __ movl(Address(obj, kX86WordSize + offset), value.AsRegisterPairHigh<Register>());
2643      break;
2644    }
2645
2646    case Primitive::kPrimFloat: {
2647      XmmRegister value = locations->InAt(1).AsFpuRegister<XmmRegister>();
2648      __ movss(Address(obj, offset), value);
2649      break;
2650    }
2651
2652    case Primitive::kPrimDouble: {
2653      XmmRegister value = locations->InAt(1).AsFpuRegister<XmmRegister>();
2654      __ movsd(Address(obj, offset), value);
2655      break;
2656    }
2657
2658    case Primitive::kPrimVoid:
2659      LOG(FATAL) << "Unreachable type " << field_type;
2660      UNREACHABLE();
2661  }
2662}
2663
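// Marks the GC card covering `object` after a reference store: skip if the
// stored value is null, load the thread-local card table base, and write one
// byte at `base + (object >> kCardShift)`. The byte written is the low byte of
// the base register itself, which presumably relies on the runtime biasing the
// card table base so that its low byte equals the dirty-card value (see
// gc/accounting/card_table.h).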
2664void CodeGeneratorX86::MarkGCCard(Register temp, Register card, Register object, Register value) {
2665  Label is_null;
2666  __ testl(value, value);
2667  __ j(kEqual, &is_null);
2668  __ fs()->movl(card, Address::Absolute(Thread::CardTableOffset<kX86WordSize>().Int32Value()));
2669  __ movl(temp, object);
2670  __ shrl(temp, Immediate(gc::accounting::CardTable::kCardShift));
2671  __ movb(Address(temp, card, TIMES_1, 0),
2672          X86ManagedRegister::FromCpuRegister(card).AsByteRegister());
2673  __ Bind(&is_null);
2674}
2675
2676void LocationsBuilderX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
2677  LocationSummary* locations =
2678      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2679  locations->SetInAt(0, Location::RequiresRegister());
2680  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2681}
2682
2683void InstructionCodeGeneratorX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
2684  LocationSummary* locations = instruction->GetLocations();
2685  Register obj = locations->InAt(0).AsRegister<Register>();
2686  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
2687
2688  switch (instruction->GetType()) {
2689    case Primitive::kPrimBoolean: {
2690      Register out = locations->Out().AsRegister<Register>();
2691      __ movzxb(out, Address(obj, offset));
2692      break;
2693    }
2694
2695    case Primitive::kPrimByte: {
2696      Register out = locations->Out().AsRegister<Register>();
2697      __ movsxb(out, Address(obj, offset));
2698      break;
2699    }
2700
2701    case Primitive::kPrimShort: {
2702      Register out = locations->Out().AsRegister<Register>();
2703      __ movsxw(out, Address(obj, offset));
2704      break;
2705    }
2706
2707    case Primitive::kPrimChar: {
2708      Register out = locations->Out().AsRegister<Register>();
2709      __ movzxw(out, Address(obj, offset));
2710      break;
2711    }
2712
2713    case Primitive::kPrimInt:
2714    case Primitive::kPrimNot: {
2715      Register out = locations->Out().AsRegister<Register>();
2716      __ movl(out, Address(obj, offset));
2717      break;
2718    }
2719
2720    case Primitive::kPrimLong: {
2721      // TODO: support volatile.
2722      __ movl(locations->Out().AsRegisterPairLow<Register>(), Address(obj, offset));
2723      __ movl(locations->Out().AsRegisterPairHigh<Register>(), Address(obj, kX86WordSize + offset));
2724      break;
2725    }
2726
2727    case Primitive::kPrimFloat: {
2728      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
2729      __ movss(out, Address(obj, offset));
2730      break;
2731    }
2732
2733    case Primitive::kPrimDouble: {
2734      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
2735      __ movsd(out, Address(obj, offset));
2736      break;
2737    }
2738
2739    case Primitive::kPrimVoid:
2740      LOG(FATAL) << "Unreachable type " << instruction->GetType();
2741      UNREACHABLE();
2742  }
2743}
2744
2745void LocationsBuilderX86::VisitNullCheck(HNullCheck* instruction) {
2746  LocationSummary* locations =
2747      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2748  locations->SetInAt(0, Location::Any());
2749  if (instruction->HasUses()) {
2750    locations->SetOut(Location::SameAsFirstInput());
2751  }
2752}
2753
2754void InstructionCodeGeneratorX86::VisitNullCheck(HNullCheck* instruction) {
2755  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86(instruction);
2756  codegen_->AddSlowPath(slow_path);
2757
2758  LocationSummary* locations = instruction->GetLocations();
2759  Location obj = locations->InAt(0);
2760
2761  if (obj.IsRegister()) {
2762    __ cmpl(obj.AsRegister<Register>(), Immediate(0));
2763  } else if (obj.IsStackSlot()) {
2764    __ cmpl(Address(ESP, obj.GetStackIndex()), Immediate(0));
2765  } else {
2766    DCHECK(obj.IsConstant()) << obj;
2767    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
2768    __ jmp(slow_path->GetEntryLabel());
2769    return;
2770  }
2771  __ j(kEqual, slow_path->GetEntryLabel());
2772}
2773
2774void LocationsBuilderX86::VisitArrayGet(HArrayGet* instruction) {
2775  LocationSummary* locations =
2776      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2777  locations->SetInAt(0, Location::RequiresRegister());
2778  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2779  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2780}
2781
2782void InstructionCodeGeneratorX86::VisitArrayGet(HArrayGet* instruction) {
2783  LocationSummary* locations = instruction->GetLocations();
2784  Register obj = locations->InAt(0).AsRegister<Register>();
2785  Location index = locations->InAt(1);
2786
2787  switch (instruction->GetType()) {
2788    case Primitive::kPrimBoolean: {
2789      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
2790      Register out = locations->Out().AsRegister<Register>();
2791      if (index.IsConstant()) {
2792        __ movzxb(out, Address(obj,
2793            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
2794      } else {
2795        __ movzxb(out, Address(obj, index.AsRegister<Register>(), TIMES_1, data_offset));
2796      }
2797      break;
2798    }
2799
2800    case Primitive::kPrimByte: {
2801      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
2802      Register out = locations->Out().AsRegister<Register>();
2803      if (index.IsConstant()) {
2804        __ movsxb(out, Address(obj,
2805            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
2806      } else {
2807        __ movsxb(out, Address(obj, index.AsRegister<Register>(), TIMES_1, data_offset));
2808      }
2809      break;
2810    }
2811
2812    case Primitive::kPrimShort: {
2813      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
2814      Register out = locations->Out().AsRegister<Register>();
2815      if (index.IsConstant()) {
2816        __ movsxw(out, Address(obj,
2817            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
2818      } else {
2819        __ movsxw(out, Address(obj, index.AsRegister<Register>(), TIMES_2, data_offset));
2820      }
2821      break;
2822    }
2823
2824    case Primitive::kPrimChar: {
2825      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
2826      Register out = locations->Out().AsRegister<Register>();
2827      if (index.IsConstant()) {
2828        __ movzxw(out, Address(obj,
2829            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
2830      } else {
2831        __ movzxw(out, Address(obj, index.AsRegister<Register>(), TIMES_2, data_offset));
2832      }
2833      break;
2834    }
2835
2836    case Primitive::kPrimInt:
2837    case Primitive::kPrimNot: {
2838      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2839      Register out = locations->Out().AsRegister<Register>();
2840      if (index.IsConstant()) {
2841        __ movl(out, Address(obj,
2842            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
2843      } else {
2844        __ movl(out, Address(obj, index.AsRegister<Register>(), TIMES_4, data_offset));
2845      }
2846      break;
2847    }
2848
2849    case Primitive::kPrimLong: {
2850      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
2851      Location out = locations->Out();
2852      if (index.IsConstant()) {
2853        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
2854        __ movl(out.AsRegisterPairLow<Register>(), Address(obj, offset));
2855        __ movl(out.AsRegisterPairHigh<Register>(), Address(obj, offset + kX86WordSize));
2856      } else {
2857        __ movl(out.AsRegisterPairLow<Register>(),
2858                Address(obj, index.AsRegister<Register>(), TIMES_8, data_offset));
2859        __ movl(out.AsRegisterPairHigh<Register>(),
2860                Address(obj, index.AsRegister<Register>(), TIMES_8, data_offset + kX86WordSize));
2861      }
2862      break;
2863    }
2864
2865    case Primitive::kPrimFloat:
2866    case Primitive::kPrimDouble:
2867      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
2868      UNREACHABLE();
2869    case Primitive::kPrimVoid:
2870      LOG(FATAL) << "Unreachable type " << instruction->GetType();
2871      UNREACHABLE();
2872  }
2873}
2874
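// Object stores that need a type check, or that would leave too few registers for the
// write barrier, are dispatched to the runtime. Inline stores keep byte values in a byte
// register and reserve two temporaries for the card marking when a write barrier is needed.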
2875void LocationsBuilderX86::VisitArraySet(HArraySet* instruction) {
2876  Primitive::Type value_type = instruction->GetComponentType();
2877  bool needs_write_barrier =
2878      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
2879
2880  DCHECK(kFollowsQuickABI);
2881  bool not_enough_registers = needs_write_barrier
2882      && !instruction->GetValue()->IsConstant()
2883      && !instruction->GetIndex()->IsConstant();
2884  bool needs_runtime_call = instruction->NeedsTypeCheck() || not_enough_registers;
2885
2886  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2887      instruction,
2888      needs_runtime_call ? LocationSummary::kCall : LocationSummary::kNoCall);
2889
2890  if (needs_runtime_call) {
2891    InvokeRuntimeCallingConvention calling_convention;
2892    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2893    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2894    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2895  } else {
2896    bool is_byte_type = (value_type == Primitive::kPrimBoolean)
2897        || (value_type == Primitive::kPrimByte);
2898    // We need the inputs to be different from the output for long operations.
2899    // For a byte operation, the register allocator does not support multiple
2900    // inputs that die at entry with one in a specific register.
2901    locations->SetInAt(0, Location::RequiresRegister());
2902    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2903    if (is_byte_type) {
2904      // Ensure the value is in a byte register.
2905      locations->SetInAt(2, Location::ByteRegisterOrConstant(EAX, instruction->InputAt(2)));
2906    } else {
2907      locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
2908    }
2909    // Temporary registers for the write barrier.
2910    if (needs_write_barrier) {
2911      locations->AddTemp(Location::RequiresRegister());
2912      // Ensure the card is in a byte register.
2913      locations->AddTemp(Location::RegisterLocation(ECX));
2914    }
2915  }
2916}
2917
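// Stores an array element using the same addressing as VisitArrayGet. Reference stores
// additionally mark the GC card; stores routed to the runtime call the pAputObject entrypoint.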
2918void InstructionCodeGeneratorX86::VisitArraySet(HArraySet* instruction) {
2919  LocationSummary* locations = instruction->GetLocations();
2920  Register obj = locations->InAt(0).AsRegister<Register>();
2921  Location index = locations->InAt(1);
2922  Location value = locations->InAt(2);
2923  Primitive::Type value_type = instruction->GetComponentType();
2924  bool needs_runtime_call = locations->WillCall();
2925  bool needs_write_barrier =
2926      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
2927
2928  switch (value_type) {
2929    case Primitive::kPrimBoolean:
2930    case Primitive::kPrimByte: {
2931      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
2932      if (index.IsConstant()) {
2933        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2934        if (value.IsRegister()) {
2935          __ movb(Address(obj, offset), value.AsRegister<ByteRegister>());
2936        } else {
2937          __ movb(Address(obj, offset),
2938                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2939        }
2940      } else {
2941        if (value.IsRegister()) {
2942          __ movb(Address(obj, index.AsRegister<Register>(), TIMES_1, data_offset),
2943                  value.AsRegister<ByteRegister>());
2944        } else {
2945          __ movb(Address(obj, index.AsRegister<Register>(), TIMES_1, data_offset),
2946                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2947        }
2948      }
2949      break;
2950    }
2951
2952    case Primitive::kPrimShort:
2953    case Primitive::kPrimChar: {
2954      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
2955      if (index.IsConstant()) {
2956        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2957        if (value.IsRegister()) {
2958          __ movw(Address(obj, offset), value.AsRegister<Register>());
2959        } else {
2960          __ movw(Address(obj, offset),
2961                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2962        }
2963      } else {
2964        if (value.IsRegister()) {
2965          __ movw(Address(obj, index.AsRegister<Register>(), TIMES_2, data_offset),
2966                  value.AsRegister<Register>());
2967        } else {
2968          __ movw(Address(obj, index.AsRegister<Register>(), TIMES_2, data_offset),
2969                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2970        }
2971      }
2972      break;
2973    }
2974
2975    case Primitive::kPrimInt:
2976    case Primitive::kPrimNot: {
2977      if (!needs_runtime_call) {
2978        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2979        if (index.IsConstant()) {
2980          size_t offset =
2981              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2982          if (value.IsRegister()) {
2983            __ movl(Address(obj, offset), value.AsRegister<Register>());
2984          } else {
2985            DCHECK(value.IsConstant()) << value;
2986            __ movl(Address(obj, offset),
2987                    Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2988          }
2989        } else {
2990          DCHECK(index.IsRegister()) << index;
2991          if (value.IsRegister()) {
2992            __ movl(Address(obj, index.AsRegister<Register>(), TIMES_4, data_offset),
2993                    value.AsRegister<Register>());
2994          } else {
2995            DCHECK(value.IsConstant()) << value;
2996            __ movl(Address(obj, index.AsRegister<Register>(), TIMES_4, data_offset),
2997                    Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2998          }
2999        }
3000
3001        if (needs_write_barrier) {
3002          Register temp = locations->GetTemp(0).AsRegister<Register>();
3003          Register card = locations->GetTemp(1).AsRegister<Register>();
3004          codegen_->MarkGCCard(temp, card, obj, value.AsRegister<Register>());
3005        }
3006      } else {
3007        DCHECK_EQ(value_type, Primitive::kPrimNot);
3008        DCHECK(!codegen_->IsLeafMethod());
3009        __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAputObject)));
3010        codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3011      }
3012      break;
3013    }
3014
3015    case Primitive::kPrimLong: {
3016      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
3017      if (index.IsConstant()) {
3018        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
3019        if (value.IsRegisterPair()) {
3020          __ movl(Address(obj, offset), value.AsRegisterPairLow<Register>());
3021          __ movl(Address(obj, offset + kX86WordSize), value.AsRegisterPairHigh<Register>());
3022        } else {
3023          DCHECK(value.IsConstant());
3024          int64_t val = value.GetConstant()->AsLongConstant()->GetValue();
3025          __ movl(Address(obj, offset), Immediate(Low32Bits(val)));
3026          __ movl(Address(obj, offset + kX86WordSize), Immediate(High32Bits(val)));
3027        }
3028      } else {
3029        if (value.IsRegisterPair()) {
3030          __ movl(Address(obj, index.AsRegister<Register>(), TIMES_8, data_offset),
3031                  value.AsRegisterPairLow<Register>());
3032          __ movl(Address(obj, index.AsRegister<Register>(), TIMES_8, data_offset + kX86WordSize),
3033                  value.AsRegisterPairHigh<Register>());
3034        } else {
3035          DCHECK(value.IsConstant());
3036          int64_t val = value.GetConstant()->AsLongConstant()->GetValue();
3037          __ movl(Address(obj, index.AsRegister<Register>(), TIMES_8, data_offset),
3038                  Immediate(Low32Bits(val)));
3039          __ movl(Address(obj, index.AsRegister<Register>(), TIMES_8, data_offset + kX86WordSize),
3040                  Immediate(High32Bits(val)));
3041        }
3042      }
3043      break;
3044    }
3045
3046    case Primitive::kPrimFloat:
3047    case Primitive::kPrimDouble:
3048      LOG(FATAL) << "Unimplemented register type " << value_type;
3049      UNREACHABLE();
3050    case Primitive::kPrimVoid:
3051      LOG(FATAL) << "Unreachable type " << value_type;
3052      UNREACHABLE();
3053  }
3054}
3055
3056void LocationsBuilderX86::VisitArrayLength(HArrayLength* instruction) {
3057  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
3058  locations->SetInAt(0, Location::RequiresRegister());
3059  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3060  instruction->SetLocations(locations);
3061}
3062
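// The array length is a 32-bit field at a fixed offset in the array header.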
3063void InstructionCodeGeneratorX86::VisitArrayLength(HArrayLength* instruction) {
3064  LocationSummary* locations = instruction->GetLocations();
3065  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
3066  Register obj = locations->InAt(0).AsRegister<Register>();
3067  Register out = locations->Out().AsRegister<Register>();
3068  __ movl(out, Address(obj, offset));
3069}
3070
3071void LocationsBuilderX86::VisitBoundsCheck(HBoundsCheck* instruction) {
3072  LocationSummary* locations =
3073      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3074  locations->SetInAt(0, Location::RequiresRegister());
3075  locations->SetInAt(1, Location::RequiresRegister());
3076  if (instruction->HasUses()) {
3077    locations->SetOut(Location::SameAsFirstInput());
3078  }
3079}
3080
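// A single unsigned comparison (kAboveEqual) also rejects negative indices, since they
// compare as large unsigned values.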
3081void InstructionCodeGeneratorX86::VisitBoundsCheck(HBoundsCheck* instruction) {
3082  LocationSummary* locations = instruction->GetLocations();
3083  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86(
3084      instruction, locations->InAt(0), locations->InAt(1));
3085  codegen_->AddSlowPath(slow_path);
3086
3087  Register index = locations->InAt(0).AsRegister<Register>();
3088  Register length = locations->InAt(1).AsRegister<Register>();
3089
3090  __ cmpl(index, length);
3091  __ j(kAboveEqual, slow_path->GetEntryLabel());
3092}
3093
3094void LocationsBuilderX86::VisitTemporary(HTemporary* temp) {
3095  temp->SetLocations(nullptr);
3096}
3097
3098void InstructionCodeGeneratorX86::VisitTemporary(HTemporary* temp) {
3099  // Nothing to do, this is driven by the code generator.
3100  UNUSED(temp);
3101}
3102
3103void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction) {
3104  UNUSED(instruction);
3105  LOG(FATAL) << "Unreachable";
3106}
3107
3108void InstructionCodeGeneratorX86::VisitParallelMove(HParallelMove* instruction) {
3109  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
3110}
3111
3112void LocationsBuilderX86::VisitSuspendCheck(HSuspendCheck* instruction) {
3113  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
3114}
3115
3116void InstructionCodeGeneratorX86::VisitSuspendCheck(HSuspendCheck* instruction) {
3117  HBasicBlock* block = instruction->GetBlock();
3118  if (block->GetLoopInformation() != nullptr) {
3119    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
3120    // The back edge will generate the suspend check.
3121    return;
3122  }
3123  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
3124    // The goto will generate the suspend check.
3125    return;
3126  }
3127  GenerateSuspendCheck(instruction, nullptr);
3128}
3129
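// Tests the thread flags word in thread-local storage (through the fs segment). Without a
// successor, the slow path is taken when a flag is set and returns right after the check;
// with a successor, a clear flags word branches straight to it and a set one jumps to the
// slow path.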
3130void InstructionCodeGeneratorX86::GenerateSuspendCheck(HSuspendCheck* instruction,
3131                                                       HBasicBlock* successor) {
3132  SuspendCheckSlowPathX86* slow_path =
3133      new (GetGraph()->GetArena()) SuspendCheckSlowPathX86(instruction, successor);
3134  codegen_->AddSlowPath(slow_path);
3135  __ fs()->cmpw(Address::Absolute(
3136      Thread::ThreadFlagsOffset<kX86WordSize>().Int32Value()), Immediate(0));
3137  if (successor == nullptr) {
3138    __ j(kNotEqual, slow_path->GetEntryLabel());
3139    __ Bind(slow_path->GetReturnLabel());
3140  } else {
3141    __ j(kEqual, codegen_->GetLabelOf(successor));
3142    __ jmp(slow_path->GetEntryLabel());
3143  }
3144}
3145
3146X86Assembler* ParallelMoveResolverX86::GetAssembler() const {
3147  return codegen_->GetAssembler();
3148}
3149
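// x86 has no memory-to-memory move, so a scratch register is borrowed; if the scratch had
// to be spilled, the extra pushed word is folded into the stack offsets.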
3150void ParallelMoveResolverX86::MoveMemoryToMemory(int dst, int src) {
3151  ScratchRegisterScope ensure_scratch(
3152      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
3153  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
3154  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, src + stack_offset));
3155  __ movl(Address(ESP, dst + stack_offset), static_cast<Register>(ensure_scratch.GetRegister()));
3156}
3157
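// Emits one resolved 32-bit move: register, stack slot or int constant sources into a
// register or stack slot destination.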
3158void ParallelMoveResolverX86::EmitMove(size_t index) {
3159  MoveOperands* move = moves_.Get(index);
3160  Location source = move->GetSource();
3161  Location destination = move->GetDestination();
3162
3163  if (source.IsRegister()) {
3164    if (destination.IsRegister()) {
3165      __ movl(destination.AsRegister<Register>(), source.AsRegister<Register>());
3166    } else {
3167      DCHECK(destination.IsStackSlot());
3168      __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegister<Register>());
3169    }
3170  } else if (source.IsStackSlot()) {
3171    if (destination.IsRegister()) {
3172      __ movl(destination.AsRegister<Register>(), Address(ESP, source.GetStackIndex()));
3173    } else {
3174      DCHECK(destination.IsStackSlot());
3175      MoveMemoryToMemory(destination.GetStackIndex(),
3176                         source.GetStackIndex());
3177    }
3178  } else if (source.IsConstant()) {
3179    HIntConstant* instruction = source.GetConstant()->AsIntConstant();
3180    Immediate imm(instruction->GetValue());
3181    if (destination.IsRegister()) {
3182      __ movl(destination.AsRegister<Register>(), imm);
3183    } else {
3184      __ movl(Address(ESP, destination.GetStackIndex()), imm);
3185    }
3186  } else {
3187    LOG(FATAL) << "Unimplemented";
3188  }
3189}
3190
3191void ParallelMoveResolverX86::Exchange(Register reg, int mem) {
3192  Register suggested_scratch = reg == EAX ? EBX : EAX;
3193  ScratchRegisterScope ensure_scratch(
3194      this, reg, suggested_scratch, codegen_->GetNumberOfCoreRegisters());
3195
3196  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
3197  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset));
3198  __ movl(Address(ESP, mem + stack_offset), reg);
3199  __ movl(reg, static_cast<Register>(ensure_scratch.GetRegister()));
3200}
3201
3202void ParallelMoveResolverX86::Exchange(int mem1, int mem2) {
3203  ScratchRegisterScope ensure_scratch1(
3204      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
3205
3206  Register suggested_scratch = ensure_scratch1.GetRegister() == EAX ? EBX : EAX;
3207  ScratchRegisterScope ensure_scratch2(
3208      this, ensure_scratch1.GetRegister(), suggested_scratch, codegen_->GetNumberOfCoreRegisters());
3209
3210  int stack_offset = ensure_scratch1.IsSpilled() ? kX86WordSize : 0;
3211  stack_offset += ensure_scratch2.IsSpilled() ? kX86WordSize : 0;
3212  __ movl(static_cast<Register>(ensure_scratch1.GetRegister()), Address(ESP, mem1 + stack_offset));
3213  __ movl(static_cast<Register>(ensure_scratch2.GetRegister()), Address(ESP, mem2 + stack_offset));
3214  __ movl(Address(ESP, mem2 + stack_offset), static_cast<Register>(ensure_scratch1.GetRegister()));
3215  __ movl(Address(ESP, mem1 + stack_offset), static_cast<Register>(ensure_scratch2.GetRegister()));
3216}
3217
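// Swaps two 32-bit locations: xchgl for register-register, and scratch-based exchanges for
// the register-memory and memory-memory cases.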
3218void ParallelMoveResolverX86::EmitSwap(size_t index) {
3219  MoveOperands* move = moves_.Get(index);
3220  Location source = move->GetSource();
3221  Location destination = move->GetDestination();
3222
3223  if (source.IsRegister() && destination.IsRegister()) {
3224    __ xchgl(destination.AsRegister<Register>(), source.AsRegister<Register>());
3225  } else if (source.IsRegister() && destination.IsStackSlot()) {
3226    Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
3227  } else if (source.IsStackSlot() && destination.IsRegister()) {
3228    Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
3229  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
3230    Exchange(destination.GetStackIndex(), source.GetStackIndex());
3231  } else {
3232    LOG(FATAL) << "Unimplemented";
3233  }
3234}
3235
3236void ParallelMoveResolverX86::SpillScratch(int reg) {
3237  __ pushl(static_cast<Register>(reg));
3238}
3239
3240void ParallelMoveResolverX86::RestoreScratch(int reg) {
3241  __ popl(static_cast<Register>(reg));
3242}
3243
3244void LocationsBuilderX86::VisitLoadClass(HLoadClass* cls) {
3245  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
3246      ? LocationSummary::kCallOnSlowPath
3247      : LocationSummary::kNoCall;
3248  LocationSummary* locations =
3249      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3250  locations->SetOut(Location::RequiresRegister());
3251}
3252
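// The referrer's class is read directly from the current ArtMethod. Other classes go
// through the dex cache of resolved types, with a slow path for unresolved classes and,
// when required, the class initialization check.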
3253void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) {
3254  Register out = cls->GetLocations()->Out().AsRegister<Register>();
3255  if (cls->IsReferrersClass()) {
3256    DCHECK(!cls->CanCallRuntime());
3257    DCHECK(!cls->MustGenerateClinitCheck());
3258    codegen_->LoadCurrentMethod(out);
3259    __ movl(out, Address(out, mirror::ArtMethod::DeclaringClassOffset().Int32Value()));
3260  } else {
3261    DCHECK(cls->CanCallRuntime());
3262    codegen_->LoadCurrentMethod(out);
3263    __ movl(out, Address(out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value()));
3264    __ movl(out, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));
3265
3266    SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86(
3267        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
3268    codegen_->AddSlowPath(slow_path);
3269    __ testl(out, out);
3270    __ j(kEqual, slow_path->GetEntryLabel());
3271    if (cls->MustGenerateClinitCheck()) {
3272      GenerateClassInitializationCheck(slow_path, out);
3273    } else {
3274      __ Bind(slow_path->GetExitLabel());
3275    }
3276  }
3277}
3278
3279void LocationsBuilderX86::VisitClinitCheck(HClinitCheck* check) {
3280  LocationSummary* locations =
3281      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3282  locations->SetInAt(0, Location::RequiresRegister());
3283  if (check->HasUses()) {
3284    locations->SetOut(Location::SameAsFirstInput());
3285  }
3286}
3287
3288void InstructionCodeGeneratorX86::VisitClinitCheck(HClinitCheck* check) {
3289  // We assume the class is not null.
3290  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86(
3291      check->GetLoadClass(), check, check->GetDexPc(), true);
3292  codegen_->AddSlowPath(slow_path);
3293  GenerateClassInitializationCheck(slow_path,
3294                                   check->GetLocations()->InAt(0).AsRegister<Register>());
3295}
3296
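// Branches to the slow path unless the class status is at least kStatusInitialized.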
3297void InstructionCodeGeneratorX86::GenerateClassInitializationCheck(
3298    SlowPathCodeX86* slow_path, Register class_reg) {
3299  __ cmpl(Address(class_reg, mirror::Class::StatusOffset().Int32Value()),
3300          Immediate(mirror::Class::kStatusInitialized));
3301  __ j(kLess, slow_path->GetEntryLabel());
3302  __ Bind(slow_path->GetExitLabel());
3303  // No need for memory fence, thanks to the X86 memory model.
3304}
3305
3306void LocationsBuilderX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
3307  LocationSummary* locations =
3308      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3309  locations->SetInAt(0, Location::RequiresRegister());
3310  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3311}
3312
3313void InstructionCodeGeneratorX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
3314  LocationSummary* locations = instruction->GetLocations();
3315  Register cls = locations->InAt(0).AsRegister<Register>();
3316  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
3317
3318  switch (instruction->GetType()) {
3319    case Primitive::kPrimBoolean: {
3320      Register out = locations->Out().AsRegister<Register>();
3321      __ movzxb(out, Address(cls, offset));
3322      break;
3323    }
3324
3325    case Primitive::kPrimByte: {
3326      Register out = locations->Out().AsRegister<Register>();
3327      __ movsxb(out, Address(cls, offset));
3328      break;
3329    }
3330
3331    case Primitive::kPrimShort: {
3332      Register out = locations->Out().AsRegister<Register>();
3333      __ movsxw(out, Address(cls, offset));
3334      break;
3335    }
3336
3337    case Primitive::kPrimChar: {
3338      Register out = locations->Out().AsRegister<Register>();
3339      __ movzxw(out, Address(cls, offset));
3340      break;
3341    }
3342
3343    case Primitive::kPrimInt:
3344    case Primitive::kPrimNot: {
3345      Register out = locations->Out().AsRegister<Register>();
3346      __ movl(out, Address(cls, offset));
3347      break;
3348    }
3349
3350    case Primitive::kPrimLong: {
3351      // TODO: support volatile.
3352      __ movl(locations->Out().AsRegisterPairLow<Register>(), Address(cls, offset));
3353      __ movl(locations->Out().AsRegisterPairHigh<Register>(), Address(cls, kX86WordSize + offset));
3354      break;
3355    }
3356
3357    case Primitive::kPrimFloat: {
3358      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
3359      __ movss(out, Address(cls, offset));
3360      break;
3361    }
3362
3363    case Primitive::kPrimDouble: {
3364      XmmRegister out = locations->Out().AsFpuRegister<XmmRegister>();
3365      __ movsd(out, Address(cls, offset));
3366      break;
3367    }
3368
3369    case Primitive::kPrimVoid:
3370      LOG(FATAL) << "Unreachable type " << instruction->GetType();
3371      UNREACHABLE();
3372  }
3373}
3374
3375void LocationsBuilderX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
3376  LocationSummary* locations =
3377      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3378  locations->SetInAt(0, Location::RequiresRegister());
3379  Primitive::Type field_type = instruction->GetFieldType();
3380  bool needs_write_barrier =
3381      CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
3382  bool is_byte_type = (field_type == Primitive::kPrimBoolean)
3383      || (field_type == Primitive::kPrimByte);
3384  // The register allocator does not support multiple
3385  // inputs that die at entry with one in a specific register.
3386  if (is_byte_type) {
3387    // Ensure the value is in a byte register.
3388    locations->SetInAt(1, Location::RegisterLocation(EAX));
3389  } else {
3390    locations->SetInAt(1, Location::RequiresRegister());
3391  }
3392  // Temporary registers for the write barrier.
3393  if (needs_write_barrier) {
3394    locations->AddTemp(Location::RequiresRegister());
3395    // Ensure the card is in a byte register.
3396    locations->AddTemp(Location::RegisterLocation(ECX));
3397  }
3398}
3399
3400void InstructionCodeGeneratorX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
3401  LocationSummary* locations = instruction->GetLocations();
3402  Register cls = locations->InAt(0).AsRegister<Register>();
3403  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
3404  Primitive::Type field_type = instruction->GetFieldType();
3405
3406  switch (field_type) {
3407    case Primitive::kPrimBoolean:
3408    case Primitive::kPrimByte: {
3409      ByteRegister value = locations->InAt(1).AsRegister<ByteRegister>();
3410      __ movb(Address(cls, offset), value);
3411      break;
3412    }
3413
3414    case Primitive::kPrimShort:
3415    case Primitive::kPrimChar: {
3416      Register value = locations->InAt(1).AsRegister<Register>();
3417      __ movw(Address(cls, offset), value);
3418      break;
3419    }
3420
3421    case Primitive::kPrimInt:
3422    case Primitive::kPrimNot: {
3423      Register value = locations->InAt(1).AsRegister<Register>();
3424      __ movl(Address(cls, offset), value);
3425
3426      if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
3427        Register temp = locations->GetTemp(0).AsRegister<Register>();
3428        Register card = locations->GetTemp(1).AsRegister<Register>();
3429        codegen_->MarkGCCard(temp, card, cls, value);
3430      }
3431      break;
3432    }
3433
3434    case Primitive::kPrimLong: {
3435      Location value = locations->InAt(1);
3436      __ movl(Address(cls, offset), value.AsRegisterPairLow<Register>());
3437      __ movl(Address(cls, kX86WordSize + offset), value.AsRegisterPairHigh<Register>());
3438      break;
3439    }
3440
3441    case Primitive::kPrimFloat: {
3442      XmmRegister value = locations->InAt(1).AsFpuRegister<XmmRegister>();
3443      __ movss(Address(cls, offset), value);
3444      break;
3445    }
3446
3447    case Primitive::kPrimDouble: {
3448      XmmRegister value = locations->InAt(1).AsFpuRegister<XmmRegister>();
3449      __ movsd(Address(cls, offset), value);
3450      break;
3451    }
3452
3453    case Primitive::kPrimVoid:
3454      LOG(FATAL) << "Unreachable type " << field_type;
3455      UNREACHABLE();
3456  }
3457}
3458
3459void LocationsBuilderX86::VisitLoadString(HLoadString* load) {
3460  LocationSummary* locations =
3461      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
3462  locations->SetOut(Location::RequiresRegister());
3463}
3464
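// Resolves the string through the declaring class' dex cache strings array; a null cache
// entry takes the slow path.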
3465void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) {
3466  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86(load);
3467  codegen_->AddSlowPath(slow_path);
3468
3469  Register out = load->GetLocations()->Out().AsRegister<Register>();
3470  codegen_->LoadCurrentMethod(out);
3471  __ movl(out, Address(out, mirror::ArtMethod::DeclaringClassOffset().Int32Value()));
3472  __ movl(out, Address(out, mirror::Class::DexCacheStringsOffset().Int32Value()));
3473  __ movl(out, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
3474  __ testl(out, out);
3475  __ j(kEqual, slow_path->GetEntryLabel());
3476  __ Bind(slow_path->GetExitLabel());
3477}
3478
3479void LocationsBuilderX86::VisitLoadException(HLoadException* load) {
3480  LocationSummary* locations =
3481      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3482  locations->SetOut(Location::RequiresRegister());
3483}
3484
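// Reads the pending exception from the Thread (through fs) and clears the slot.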
3485void InstructionCodeGeneratorX86::VisitLoadException(HLoadException* load) {
3486  Address address = Address::Absolute(Thread::ExceptionOffset<kX86WordSize>().Int32Value());
3487  __ fs()->movl(load->GetLocations()->Out().AsRegister<Register>(), address);
3488  __ fs()->movl(address, Immediate(0));
3489}
3490
3491void LocationsBuilderX86::VisitThrow(HThrow* instruction) {
3492  LocationSummary* locations =
3493      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3494  InvokeRuntimeCallingConvention calling_convention;
3495  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3496}
3497
3498void InstructionCodeGeneratorX86::VisitThrow(HThrow* instruction) {
3499  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pDeliverException)));
3500  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3501}
3502
3503void LocationsBuilderX86::VisitInstanceOf(HInstanceOf* instruction) {
3504  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
3505      ? LocationSummary::kNoCall
3506      : LocationSummary::kCallOnSlowPath;
3507  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3508  locations->SetInAt(0, Location::RequiresRegister());
3509  locations->SetInAt(1, Location::Any());
3510  locations->SetOut(Location::RequiresRegister());
3511}
3512
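// A null object yields 0. For final classes a single class comparison decides the result;
// otherwise a class mismatch defers to the type-check slow path.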
3513void InstructionCodeGeneratorX86::VisitInstanceOf(HInstanceOf* instruction) {
3514  LocationSummary* locations = instruction->GetLocations();
3515  Register obj = locations->InAt(0).AsRegister<Register>();
3516  Location cls = locations->InAt(1);
3517  Register out = locations->Out().AsRegister<Register>();
3518  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3519  Label done, zero;
3520  SlowPathCodeX86* slow_path = nullptr;
3521
3522  // Return 0 if `obj` is null.
3523  // TODO: avoid this check if we know obj is not null.
3524  __ testl(obj, obj);
3525  __ j(kEqual, &zero);
3526  __ movl(out, Address(obj, class_offset));
3527  // Compare the class of `obj` with `cls`.
3528  if (cls.IsRegister()) {
3529    __ cmpl(out, cls.AsRegister<Register>());
3530  } else {
3531    DCHECK(cls.IsStackSlot()) << cls;
3532    __ cmpl(out, Address(ESP, cls.GetStackIndex()));
3533  }
3534
3535  if (instruction->IsClassFinal()) {
3536    // Classes must be equal for the instanceof to succeed.
3537    __ j(kNotEqual, &zero);
3538    __ movl(out, Immediate(1));
3539    __ jmp(&done);
3540  } else {
3541    // If the classes are not equal, we go into a slow path.
3542    DCHECK(locations->OnlyCallsOnSlowPath());
3543    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86(
3544        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
3545    codegen_->AddSlowPath(slow_path);
3546    __ j(kNotEqual, slow_path->GetEntryLabel());
3547    __ movl(out, Immediate(1));
3548    __ jmp(&done);
3549  }
3550  __ Bind(&zero);
3551  __ movl(out, Immediate(0));
3552  if (slow_path != nullptr) {
3553    __ Bind(slow_path->GetExitLabel());
3554  }
3555  __ Bind(&done);
3556}
3557
3558void LocationsBuilderX86::VisitCheckCast(HCheckCast* instruction) {
3559  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3560      instruction, LocationSummary::kCallOnSlowPath);
3561  locations->SetInAt(0, Location::RequiresRegister());
3562  locations->SetInAt(1, Location::Any());
3563  locations->AddTemp(Location::RequiresRegister());
3564}
3565
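// A null object passes the cast. A direct class mismatch enters the type-check slow path,
// which performs the full check.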
3566void InstructionCodeGeneratorX86::VisitCheckCast(HCheckCast* instruction) {
3567  LocationSummary* locations = instruction->GetLocations();
3568  Register obj = locations->InAt(0).AsRegister<Register>();
3569  Location cls = locations->InAt(1);
3570  Register temp = locations->GetTemp(0).AsRegister<Register>();
3571  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
3572  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86(
3573      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
3574  codegen_->AddSlowPath(slow_path);
3575
3576  // TODO: avoid this check if we know obj is not null.
3577  __ testl(obj, obj);
3578  __ j(kEqual, slow_path->GetExitLabel());
3579  __ movl(temp, Address(obj, class_offset));
3580
3581  // Compare the class of `obj` with `cls`.
3582  if (cls.IsRegister()) {
3583    __ cmpl(temp, cls.AsRegister<Register>());
3584  } else {
3585    DCHECK(cls.IsStackSlot()) << cls;
3586    __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
3587  }
3588
3589  __ j(kNotEqual, slow_path->GetEntryLabel());
3590  __ Bind(slow_path->GetExitLabel());
3591}
3592
3593void LocationsBuilderX86::VisitMonitorOperation(HMonitorOperation* instruction) {
3594  LocationSummary* locations =
3595      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3596  InvokeRuntimeCallingConvention calling_convention;
3597  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3598}
3599
3600void InstructionCodeGeneratorX86::VisitMonitorOperation(HMonitorOperation* instruction) {
3601  __ fs()->call(Address::Absolute(instruction->IsEnter()
3602        ? QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pLockObject)
3603        : QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pUnlockObject)));
3604  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3605}
3606
3607void LocationsBuilderX86::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
3608void LocationsBuilderX86::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
3609void LocationsBuilderX86::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
3610
3611void LocationsBuilderX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
3612  LocationSummary* locations =
3613      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3614  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
3615         || instruction->GetResultType() == Primitive::kPrimLong);
3616  locations->SetInAt(0, Location::RequiresRegister());
3617  locations->SetInAt(1, Location::Any());
3618  locations->SetOut(Location::SameAsFirstInput());
3619}
3620
3621void InstructionCodeGeneratorX86::VisitAnd(HAnd* instruction) {
3622  HandleBitwiseOperation(instruction);
3623}
3624
3625void InstructionCodeGeneratorX86::VisitOr(HOr* instruction) {
3626  HandleBitwiseOperation(instruction);
3627}
3628
3629void InstructionCodeGeneratorX86::VisitXor(HXor* instruction) {
3630  HandleBitwiseOperation(instruction);
3631}
3632
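// Bitwise operations are generated in place on the first input (which is also the output).
// For ints the second operand may be a register, an immediate or a stack slot; longs are
// processed as low and high 32-bit halves from a register pair or a pair of stack slots.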
3633void InstructionCodeGeneratorX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
3634  LocationSummary* locations = instruction->GetLocations();
3635  Location first = locations->InAt(0);
3636  Location second = locations->InAt(1);
3637  DCHECK(first.Equals(locations->Out()));
3638
3639  if (instruction->GetResultType() == Primitive::kPrimInt) {
3640    if (second.IsRegister()) {
3641      if (instruction->IsAnd()) {
3642        __ andl(first.AsRegister<Register>(), second.AsRegister<Register>());
3643      } else if (instruction->IsOr()) {
3644        __ orl(first.AsRegister<Register>(), second.AsRegister<Register>());
3645      } else {
3646        DCHECK(instruction->IsXor());
3647        __ xorl(first.AsRegister<Register>(), second.AsRegister<Register>());
3648      }
3649    } else if (second.IsConstant()) {
3650      if (instruction->IsAnd()) {
3651        __ andl(first.AsRegister<Register>(),
3652                Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
3653      } else if (instruction->IsOr()) {
3654        __ orl(first.AsRegister<Register>(),
3655               Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
3656      } else {
3657        DCHECK(instruction->IsXor());
3658        __ xorl(first.AsRegister<Register>(),
3659                Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
3660      }
3661    } else {
3662      if (instruction->IsAnd()) {
3663        __ andl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
3664      } else if (instruction->IsOr()) {
3665        __ orl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
3666      } else {
3667        DCHECK(instruction->IsXor());
3668        __ xorl(first.AsRegister<Register>(), Address(ESP, second.GetStackIndex()));
3669      }
3670    }
3671  } else {
3672    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3673    if (second.IsRegisterPair()) {
3674      if (instruction->IsAnd()) {
3675        __ andl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
3676        __ andl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
3677      } else if (instruction->IsOr()) {
3678        __ orl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
3679        __ orl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
3680      } else {
3681        DCHECK(instruction->IsXor());
3682        __ xorl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
3683        __ xorl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
3684      }
3685    } else {
3686      if (instruction->IsAnd()) {
3687        __ andl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
3688        __ andl(first.AsRegisterPairHigh<Register>(),
3689                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
3690      } else if (instruction->IsOr()) {
3691        __ orl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
3692        __ orl(first.AsRegisterPairHigh<Register>(),
3693                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
3694      } else {
3695        DCHECK(instruction->IsXor());
3696        __ xorl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
3697        __ xorl(first.AsRegisterPairHigh<Register>(),
3698                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
3699      }
3700    }
3701  }
3702}
3703
3704}  // namespace x86
3705}  // namespace art
3706