code_generator_x86.cc revision 799f506b8d48bcceef5e6cf50f3f5eb6bcea05e1
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86.h"
18
19#include "entrypoints/quick/quick_entrypoints.h"
20#include "gc/accounting/card_table.h"
21#include "mirror/array-inl.h"
22#include "mirror/art_method.h"
23#include "mirror/class.h"
24#include "thread.h"
25#include "utils/assembler.h"
26#include "utils/stack_checks.h"
27#include "utils/x86/assembler_x86.h"
28#include "utils/x86/managed_register_x86.h"
29
30namespace art {
31
32namespace x86 {
33
// Selects between the implicit (fault-probe) and explicit (compare + slow
// path) stack overflow check emitted in GenerateFrameEntry().
static constexpr bool kExplicitStackOverflowCheck = false;

// Only the fake return-address register is spilled on entry (see
// GenerateFrameEntry()), so the entry spill area is a single word.
static constexpr int kNumberOfPushedRegistersAtEntry = 1;
// Stack offset (from ESP) at which the current ArtMethod* is stored.
static constexpr int kCurrentMethodStackOffset = 0;

// Core registers used to pass arguments when calling runtime entrypoints.
static constexpr Register kRuntimeParameterCoreRegisters[] = { EAX, ECX, EDX, EBX };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
// The runtime calling convention passes no arguments in XMM registers.
static constexpr XmmRegister kRuntimeParameterFpuRegisters[] = { };
static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;

// Marker for places that can be updated once we don't follow the quick ABI.
static constexpr bool kFollowsQuickABI = true;
47
// Calling convention used for calls into runtime entrypoints: arguments go
// in the core registers listed above; no FPU registers are used.
class InvokeRuntimeCallingConvention : public CallingConvention<Register, XmmRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
59
// Within the slow-path classes below, `__` emits through the assembler of
// the `codegen` argument passed to EmitNativeCode().
#define __ reinterpret_cast<X86Assembler*>(codegen->GetAssembler())->
61
// Base class for x86 slow paths: out-of-line code the fast path jumps into
// and, when the slow path falls through, jumps back out of.
class SlowPathCodeX86 : public SlowPathCode {
 public:
  SlowPathCodeX86() : entry_label_(), exit_label_() {}

  // Label bound at the start of the slow-path code; the fast path jumps here.
  Label* GetEntryLabel() { return &entry_label_; }
  // Label in the fast path that the slow path jumps back to when finished.
  Label* GetExitLabel() { return &exit_label_; }

 private:
  Label entry_label_;
  Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeX86);
};
75
// Slow path for HNullCheck: calls the runtime to throw NullPointerException.
// The entrypoint does not return, so no jump back to the exit label is emitted.
class NullCheckSlowPathX86 : public SlowPathCodeX86 {
 public:
  explicit NullCheckSlowPathX86(HNullCheck* instruction) : instruction_(instruction) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    // Call the throw entrypoint through the thread register (fs:) and record
    // the dex PC so the runtime can build the stack trace.
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowNullPointer)));
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86);
};
90
// Slow path for HDivZeroCheck: calls the runtime to throw ArithmeticException.
// The entrypoint does not return, so no jump back to the exit label is emitted.
class DivZeroCheckSlowPathX86 : public SlowPathCodeX86 {
 public:
  explicit DivZeroCheckSlowPathX86(HDivZeroCheck* instruction) : instruction_(instruction) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    // Call the throw entrypoint through the thread register (fs:) and record
    // the dex PC for the stack map.
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowDivZero)));
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathX86);
};
105
// Slow path taken for integer div/rem when the divisor is -1, which the
// `idiv` instruction cannot handle for kMinInt (it raises #DE). Computes the
// result without dividing: quotient = -dividend, remainder = 0.
class DivRemMinusOneSlowPathX86 : public SlowPathCodeX86 {
 public:
  explicit DivRemMinusOneSlowPathX86(Register reg, bool is_div) : reg_(reg), is_div_(is_div) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    if (is_div_) {
      // x / -1 == -x (negation of kMinInt wraps back to kMinInt, as required).
      __ negl(reg_);
    } else {
      // x % -1 == 0.
      __ movl(reg_, Immediate(0));
    }
    __ jmp(GetExitLabel());
  }

 private:
  Register reg_;  // Holds the dividend on entry and the result on exit.
  bool is_div_;   // True for division, false for remainder.
  DISALLOW_COPY_AND_ASSIGN(DivRemMinusOneSlowPathX86);
};
125
// Slow path for the explicit stack overflow check: tears down this method's
// frame and tail-jumps to the runtime to throw StackOverflowError.
class StackOverflowCheckSlowPathX86 : public SlowPathCodeX86 {
 public:
  StackOverflowCheckSlowPathX86() {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    // Undo the stack adjustment made in GenerateFrameEntry() so the throw
    // entrypoint unwinds from the caller's frame.
    __ addl(ESP,
            Immediate(codegen->GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
    // jmp (not call): the entrypoint never returns here.
    __ fs()->jmp(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowStackOverflow)));
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathX86);
};
140
// Slow path for HBoundsCheck: moves index and length into the runtime
// calling-convention registers and throws ArrayIndexOutOfBoundsException.
class BoundsCheckSlowPathX86 : public SlowPathCodeX86 {
 public:
  BoundsCheckSlowPathX86(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction), index_location_(index_location), length_location_(length_location) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    x86_codegen->EmitParallelMoves(
        index_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        length_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    // The entrypoint throws and does not return.
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowArrayBounds)));
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;   // Where the out-of-range index lives.
  const Location length_location_;  // Where the array length lives.

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86);
};
170
// Slow path for HSuspendCheck: saves live registers, calls the runtime's
// suspend test, restores registers, then resumes either at the point after
// the check (`successor_ == nullptr`) or at an explicit successor block.
class SuspendCheckSlowPathX86 : public SlowPathCodeX86 {
 public:
  explicit SuspendCheckSlowPathX86(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    // The runtime call can clobber registers, so spill everything live.
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pTestSuspend)));
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(x86_codegen->GetLabelOf(successor_));
    }
  }

  // Resume label used when no explicit successor block was supplied.
  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  HBasicBlock* const successor_;  // Block to resume in, or nullptr.
  Label return_label_;
  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86);
};
202
// Slow path for HLoadString: calls the runtime to resolve the string at the
// given dex string index, then moves the result (in EAX) to the instruction's
// output location.
class LoadStringSlowPathX86 : public SlowPathCodeX86 {
 public:
  explicit LoadStringSlowPathX86(HLoadString* instruction) : instruction_(instruction) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // The output register must not be live-saved, or the restore below would
    // overwrite the result we just produced.
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Arguments: current method, string index.
    InvokeRuntimeCallingConvention calling_convention;
    x86_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(0));
    __ movl(calling_convention.GetRegisterAt(1), Immediate(instruction_->GetStringIndex()));
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pResolveString)));
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
    x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
    codegen->RestoreLiveRegisters(locations);

    __ jmp(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86);
};
231
// Slow path shared by HLoadClass and HClinitCheck: calls the runtime to
// resolve a type (or initialize its static storage) and, if the triggering
// instruction produces a value, moves the resolved class out of EAX.
class LoadClassSlowPathX86 : public SlowPathCodeX86 {
 public:
  LoadClassSlowPathX86(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();
    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Arguments: type index, current method.
    InvokeRuntimeCallingConvention calling_convention;
    __ movl(calling_convention.GetRegisterAt(0), Immediate(cls_->GetTypeIndex()));
    x86_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    __ fs()->call(Address::Absolute(do_clinit_
        ? QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pInitializeStaticStorage)
        : QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pInitializeType)));
    codegen->RecordPcInfo(at_, dex_pc_);

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      // The output must not be live-saved, or the restore would clobber it.
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      x86_codegen->Move32(out, Location::RegisterLocation(EAX));
    }

    codegen->RestoreLiveRegisters(locations);
    __ jmp(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86);
};
283
// Slow path shared by HInstanceOf and HCheckCast: calls the runtime with the
// class to check against and the object's class. For instance-of the boolean
// result (in EAX) is moved to the output; for check-cast the entrypoint
// throws on failure.
class TypeCheckSlowPathX86 : public SlowPathCodeX86 {
 public:
  TypeCheckSlowPathX86(HInstruction* instruction,
                       Location class_to_check,
                       Location object_class,
                       uint32_t dex_pc)
      : instruction_(instruction),
        class_to_check_(class_to_check),
        object_class_(object_class),
        dex_pc_(dex_pc) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    // For instance-of, the output register must not be live-saved, or the
    // restore below would clobber the result.
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    x86_codegen->EmitParallelMoves(
        class_to_check_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        object_class_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));

    if (instruction_->IsInstanceOf()) {
      __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pInstanceofNonTrivial)));
    } else {
      DCHECK(instruction_->IsCheckCast());
      __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pCheckCast)));
    }

    codegen->RecordPcInfo(instruction_, dex_pc_);
    if (instruction_->IsInstanceOf()) {
      x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
    }
    codegen->RestoreLiveRegisters(locations);

    __ jmp(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;     // The instance-of or check-cast.
  const Location class_to_check_;       // Class to test against.
  const Location object_class_;         // The object's actual class.
  const uint32_t dex_pc_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathX86);
};
337
#undef __
// From here on, `__` emits through this code generator's own assembler.
#define __ reinterpret_cast<X86Assembler*>(GetAssembler())->
340
341inline Condition X86Condition(IfCondition cond) {
342  switch (cond) {
343    case kCondEQ: return kEqual;
344    case kCondNE: return kNotEqual;
345    case kCondLT: return kLess;
346    case kCondLE: return kLessEqual;
347    case kCondGT: return kGreater;
348    case kCondGE: return kGreaterEqual;
349    default:
350      LOG(FATAL) << "Unknown if condition";
351  }
352  return kEqual;
353}
354
355void CodeGeneratorX86::DumpCoreRegister(std::ostream& stream, int reg) const {
356  stream << X86ManagedRegister::FromCpuRegister(Register(reg));
357}
358
359void CodeGeneratorX86::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
360  stream << X86ManagedRegister::FromXmmRegister(XmmRegister(reg));
361}
362
363size_t CodeGeneratorX86::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
364  __ movl(Address(ESP, stack_index), static_cast<Register>(reg_id));
365  return kX86WordSize;
366}
367
368size_t CodeGeneratorX86::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
369  __ movl(static_cast<Register>(reg_id), Address(ESP, stack_index));
370  return kX86WordSize;
371}
372
// Constructs the x86 code generator, declaring to the register allocator the
// number of core, XMM and core-pair registers available on this target.
CodeGeneratorX86::CodeGeneratorX86(HGraph* graph)
    : CodeGenerator(graph, kNumberOfCpuRegisters, kNumberOfXmmRegisters, kNumberOfRegisterPairs),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this) {}
379
380size_t CodeGeneratorX86::FrameEntrySpillSize() const {
381  return kNumberOfPushedRegistersAtEntry * kX86WordSize;
382}
383
// Picks a free register (or register pair) suitable for `type` and marks it
// blocked, keeping the single-register and pair bookkeeping consistent.
Location CodeGeneratorX86::AllocateFreeRegister(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimLong: {
      // Longs need a pair; block both halves individually as well.
      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
      X86ManagedRegister pair =
          X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);
      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
      UpdateBlockedPairRegisters();
      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register reg = static_cast<Register>(
          FindFreeEntry(blocked_core_registers_, kNumberOfCpuRegisters));
      // Block all register pairs that contain `reg`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        X86ManagedRegister current =
            X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs_[i] = true;
        }
      }
      return Location::RegisterLocation(reg);
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      // XMM registers hold both float and double values.
      return Location::FpuRegisterLocation(
          FindFreeEntry(blocked_fpu_registers_, kNumberOfXmmRegisters));
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return Location();
}
429
// Marks registers the allocator must never hand out, then propagates the
// blocking to the register pairs that contain them.
void CodeGeneratorX86::SetupBlockedRegisters() const {
  // Don't allocate the dalvik style register pair passing.
  blocked_register_pairs_[ECX_EDX] = true;

  // Stack register is always reserved.
  blocked_core_registers_[ESP] = true;

  // TODO: We currently don't use Quick's callee saved registers.
  DCHECK(kFollowsQuickABI);
  blocked_core_registers_[EBP] = true;
  blocked_core_registers_[ESI] = true;
  blocked_core_registers_[EDI] = true;

  UpdateBlockedPairRegisters();
}
445
446void CodeGeneratorX86::UpdateBlockedPairRegisters() const {
447  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
448    X86ManagedRegister current =
449        X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
450    if (blocked_core_registers_[current.AsRegisterPairLow()]
451        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
452      blocked_register_pairs_[i] = true;
453    }
454  }
455}
456
// Instruction visitor that emits the actual machine code; it shares the
// assembler owned by `codegen`.
InstructionCodeGeneratorX86::InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
461
// Emits the method prologue: stack overflow check (implicit probe or
// explicit compare against the thread's stack end), frame allocation, and
// storing the current ArtMethod* (passed in EAX) at the bottom of the frame.
void CodeGeneratorX86::GenerateFrameEntry() {
  // Create a fake register to mimic Quick.
  static const int kFakeReturnRegister = 8;
  core_spill_mask_ |= (1 << kFakeReturnRegister);

  bool skip_overflow_check = IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86);
  if (!skip_overflow_check && !kExplicitStackOverflowCheck) {
    // Implicit check: read below the stack pointer; overflowing the reserved
    // region faults, and the fault handler raises StackOverflowError.
    __ testl(EAX, Address(ESP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86))));
    RecordPcInfo(nullptr, 0);
  }

  // The return PC has already been pushed on the stack.
  __ subl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));

  if (!skip_overflow_check && kExplicitStackOverflowCheck) {
    // Explicit check: compare ESP against the thread's stack end and branch
    // to a throwing slow path when below it.
    SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathX86();
    AddSlowPath(slow_path);

    __ fs()->cmpl(ESP, Address::Absolute(Thread::StackEndOffset<kX86WordSize>()));
    __ j(kLess, slow_path->GetEntryLabel());
  }

  // EAX holds the current method on entry; spill it to its home slot.
  __ movl(Address(ESP, kCurrentMethodStackOffset), EAX);
}
486
487void CodeGeneratorX86::GenerateFrameExit() {
488  __ addl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
489}
490
491void CodeGeneratorX86::Bind(HBasicBlock* block) {
492  __ Bind(GetLabelOf(block));
493}
494
495void CodeGeneratorX86::LoadCurrentMethod(Register reg) {
496  __ movl(reg, Address(ESP, kCurrentMethodStackOffset));
497}
498
499Location CodeGeneratorX86::GetStackLocation(HLoadLocal* load) const {
500  switch (load->GetType()) {
501    case Primitive::kPrimLong:
502    case Primitive::kPrimDouble:
503      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
504      break;
505
506    case Primitive::kPrimInt:
507    case Primitive::kPrimNot:
508    case Primitive::kPrimFloat:
509      return Location::StackSlot(GetStackSlot(load->GetLocal()));
510
511    case Primitive::kPrimBoolean:
512    case Primitive::kPrimByte:
513    case Primitive::kPrimChar:
514    case Primitive::kPrimShort:
515    case Primitive::kPrimVoid:
516      LOG(FATAL) << "Unexpected type " << load->GetType();
517  }
518
519  LOG(FATAL) << "Unreachable";
520  return Location();
521}
522
// Assigns the next argument of type `type` to its location under the dex
// calling convention: 32-bit values take one register (or stack slot) and
// 64-bit values take a register pair, a register/stack split ("quick
// parameter") or a double stack slot.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimNot: {
      // Floats are passed in core registers like ints under this convention.
      uint32_t index = gp_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble: {
      // 64-bit values consume two consecutive argument indices.
      uint32_t index = gp_index_;
      gp_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        X86ManagedRegister pair = X86ManagedRegister::FromRegisterPair(
            calling_convention.GetRegisterPairAt(index));
        return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
        // On X86, the register index and stack index of a quick parameter is the same, since
        // we are passing floating pointer values in core registers.
        return Location::QuickParameter(index, index);
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
563
// Moves a 32-bit value between any two of: core register, XMM register and
// stack slot. No-op when source and destination are the same location.
void CodeGeneratorX86::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movl(destination.As<Register>(), source.As<Register>());
    } else if (source.IsFpuRegister()) {
      __ movd(destination.As<Register>(), source.As<XmmRegister>());
    } else {
      DCHECK(source.IsStackSlot());
      __ movl(destination.As<Register>(), Address(ESP, source.GetStackIndex()));
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ movd(destination.As<XmmRegister>(), source.As<Register>());
    } else if (source.IsFpuRegister()) {
      __ movaps(destination.As<XmmRegister>(), source.As<XmmRegister>());
    } else {
      DCHECK(source.IsStackSlot());
      __ movss(destination.As<XmmRegister>(), Address(ESP, source.GetStackIndex()));
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      __ movl(Address(ESP, destination.GetStackIndex()), source.As<Register>());
    } else if (source.IsFpuRegister()) {
      __ movss(Address(ESP, destination.GetStackIndex()), source.As<XmmRegister>());
    } else {
      DCHECK(source.IsStackSlot());
      // Stack-to-stack move via push/pop to avoid needing a scratch register.
      __ pushl(Address(ESP, source.GetStackIndex()));
      __ popl(Address(ESP, destination.GetStackIndex()));
    }
  }
}
599
600void CodeGeneratorX86::Move64(Location destination, Location source) {
601  if (source.Equals(destination)) {
602    return;
603  }
604  if (destination.IsRegisterPair()) {
605    if (source.IsRegisterPair()) {
606      __ movl(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
607      __ movl(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
608    } else if (source.IsFpuRegister()) {
609      LOG(FATAL) << "Unimplemented";
610    } else if (source.IsQuickParameter()) {
611      uint16_t register_index = source.GetQuickParameterRegisterIndex();
612      uint16_t stack_index = source.GetQuickParameterStackIndex();
613      InvokeDexCallingConvention calling_convention;
614      __ movl(destination.AsRegisterPairLow<Register>(),
615              calling_convention.GetRegisterAt(register_index));
616      __ movl(destination.AsRegisterPairHigh<Register>(), Address(ESP,
617          calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize()));
618    } else {
619      DCHECK(source.IsDoubleStackSlot());
620      __ movl(destination.AsRegisterPairLow<Register>(), Address(ESP, source.GetStackIndex()));
621      __ movl(destination.AsRegisterPairHigh<Register>(),
622              Address(ESP, source.GetHighStackIndex(kX86WordSize)));
623    }
624  } else if (destination.IsQuickParameter()) {
625    InvokeDexCallingConvention calling_convention;
626    uint16_t register_index = destination.GetQuickParameterRegisterIndex();
627    uint16_t stack_index = destination.GetQuickParameterStackIndex();
628    if (source.IsRegister()) {
629      __ movl(calling_convention.GetRegisterAt(register_index), source.AsRegisterPairLow<Register>());
630      __ movl(Address(ESP, calling_convention.GetStackOffsetOf(stack_index + 1)),
631              source.AsRegisterPairHigh<Register>());
632    } else if (source.IsFpuRegister()) {
633      LOG(FATAL) << "Unimplemented";
634    } else {
635      DCHECK(source.IsDoubleStackSlot());
636      __ movl(calling_convention.GetRegisterAt(register_index),
637              Address(ESP, source.GetStackIndex()));
638      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
639      __ popl(Address(ESP, calling_convention.GetStackOffsetOf(stack_index + 1)));
640    }
641  } else if (destination.IsFpuRegister()) {
642    if (source.IsDoubleStackSlot()) {
643      __ movsd(destination.As<XmmRegister>(), Address(ESP, source.GetStackIndex()));
644    } else {
645      LOG(FATAL) << "Unimplemented";
646    }
647  } else {
648    DCHECK(destination.IsDoubleStackSlot()) << destination;
649    if (source.IsRegisterPair()) {
650      __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegisterPairLow<Register>());
651      __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
652              source.AsRegisterPairHigh<Register>());
653    } else if (source.IsQuickParameter()) {
654      InvokeDexCallingConvention calling_convention;
655      uint16_t register_index = source.GetQuickParameterRegisterIndex();
656      uint16_t stack_index = source.GetQuickParameterStackIndex();
657      __ movl(Address(ESP, destination.GetStackIndex()),
658              calling_convention.GetRegisterAt(register_index));
659      DCHECK_EQ(calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize(),
660                static_cast<size_t>(destination.GetHighStackIndex(kX86WordSize)));
661    } else if (source.IsFpuRegister()) {
662      __ movsd(Address(ESP, destination.GetStackIndex()), source.As<XmmRegister>());
663    } else {
664      DCHECK(source.IsDoubleStackSlot());
665      __ pushl(Address(ESP, source.GetStackIndex()));
666      __ popl(Address(ESP, destination.GetStackIndex()));
667      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
668      __ popl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)));
669    }
670  }
671}
672
// Moves the value produced by `instruction` into `location` on behalf of
// `move_for`, materializing constants directly and dispatching other cases
// to Move32/Move64 based on the value's width.
void CodeGeneratorX86::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    // Value already lives in the requested location.
    return;
  }

  if (locations != nullptr && locations->Out().IsConstant()) {
    // Materialize the constant straight into the target.
    HConstant* const_to_move = locations->Out().GetConstant();
    if (const_to_move->IsIntConstant()) {
      Immediate imm(const_to_move->AsIntConstant()->GetValue());
      if (location.IsRegister()) {
        __ movl(location.As<Register>(), imm);
      } else if (location.IsStackSlot()) {
        __ movl(Address(ESP, location.GetStackIndex()), imm);
      } else {
        DCHECK(location.IsConstant());
        DCHECK_EQ(location.GetConstant(), const_to_move);
      }
    } else if (const_to_move->IsLongConstant()) {
      // 64-bit constants are written as two 32-bit immediates.
      int64_t value = const_to_move->AsLongConstant()->GetValue();
      if (location.IsRegisterPair()) {
        __ movl(location.AsRegisterPairLow<Register>(), Immediate(Low32Bits(value)));
        __ movl(location.AsRegisterPairHigh<Register>(), Immediate(High32Bits(value)));
      } else if (location.IsDoubleStackSlot()) {
        __ movl(Address(ESP, location.GetStackIndex()), Immediate(Low32Bits(value)));
        __ movl(Address(ESP, location.GetHighStackIndex(kX86WordSize)), Immediate(High32Bits(value)));
      } else {
        DCHECK(location.IsConstant());
        DCHECK_EQ(location.GetConstant(), instruction);
      }
    }
  } else if (instruction->IsTemporary()) {
    // Temporaries live in dedicated stack slots.
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    if (temp_location.IsStackSlot()) {
      Move32(location, temp_location);
    } else {
      DCHECK(temp_location.IsDoubleStackSlot());
      Move64(location, temp_location);
    }
  } else if (instruction->IsLoadLocal()) {
    // Locals are read from their home stack slots.
    int slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(slot));
        break;

      default:
        LOG(FATAL) << "Unimplemented local type " << instruction->GetType();
    }
  } else {
    // General case: move the instruction's output location.
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, locations->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, locations->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}
756
// A goto has no operands and produces no value, so no LocationSummary.
void LocationsBuilderX86::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
760
// Emits an unconditional branch, folding in a suspend check when the goto is
// a loop back edge, and eliding the jump when the target is the next block.
void InstructionCodeGeneratorX86::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
    // Back edge with a suspend check: emit the check and let it branch.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    // Method-entry suspend check, generated just before this goto.
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
782
// The exit block has no operands and produces no value.
void LocationsBuilderX86::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
786
// The exit block should never be reached at runtime; in debug builds emit a
// breakpoint so falling into it traps immediately.
void InstructionCodeGeneratorX86::VisitExit(HExit* exit) {
  UNUSED(exit);
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ int3();
  }
}
794
// An if only needs an input location when its condition is materialized
// (stored as a boolean); otherwise the compare's eflags are consumed
// directly and no input location is required.
void LocationsBuilderX86::VisitIf(HIf* if_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
  HInstruction* cond = if_instr->InputAt(0);
  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::Any());
  }
}
803
804void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
805  HInstruction* cond = if_instr->InputAt(0);
806  if (cond->IsIntConstant()) {
807    // Constant condition, statically compared against 1.
808    int32_t cond_value = cond->AsIntConstant()->GetValue();
809    if (cond_value == 1) {
810      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
811                                     if_instr->IfTrueSuccessor())) {
812        __ jmp(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
813      }
814      return;
815    } else {
816      DCHECK_EQ(cond_value, 0);
817    }
818  } else {
819    bool materialized =
820        !cond->IsCondition() || cond->AsCondition()->NeedsMaterialization();
821    // Moves do not affect the eflags register, so if the condition is
822    // evaluated just before the if, we don't need to evaluate it
823    // again.
824    bool eflags_set = cond->IsCondition()
825        && cond->AsCondition()->IsBeforeWhenDisregardMoves(if_instr);
826    if (materialized) {
827      if (!eflags_set) {
828        // Materialized condition, compare against 0.
829        Location lhs = if_instr->GetLocations()->InAt(0);
830        if (lhs.IsRegister()) {
831          __ cmpl(lhs.As<Register>(), Immediate(0));
832        } else {
833          __ cmpl(Address(ESP, lhs.GetStackIndex()), Immediate(0));
834        }
835        __ j(kNotEqual,  codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
836      } else {
837        __ j(X86Condition(cond->AsCondition()->GetCondition()),
838             codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
839      }
840    } else {
841      Location lhs = cond->GetLocations()->InAt(0);
842      Location rhs = cond->GetLocations()->InAt(1);
843      // LHS is guaranteed to be in a register (see
844      // LocationsBuilderX86::VisitCondition).
845      if (rhs.IsRegister()) {
846        __ cmpl(lhs.As<Register>(), rhs.As<Register>());
847      } else if (rhs.IsConstant()) {
848        HIntConstant* instruction = rhs.GetConstant()->AsIntConstant();
849        Immediate imm(instruction->AsIntConstant()->GetValue());
850        __ cmpl(lhs.As<Register>(), imm);
851      } else {
852        __ cmpl(lhs.As<Register>(), Address(ESP, rhs.GetStackIndex()));
853      }
854      __ j(X86Condition(cond->AsCondition()->GetCondition()),
855           codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
856    }
857  }
858  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
859                                 if_instr->IfFalseSuccessor())) {
860    __ jmp(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
861  }
862}
863
// A local declaration itself produces no value and needs no locations.
void LocationsBuilderX86::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}
867
void InstructionCodeGeneratorX86::VisitLocal(HLocal* local) {
  // No code is generated; locals must all be declared in the entry block.
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}
871
// Loading a local needs no locations; its uses read the stack slot directly.
void LocationsBuilderX86::VisitLoadLocal(HLoadLocal* local) {
  local->SetLocations(nullptr);
}
875
void InstructionCodeGeneratorX86::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator: users of the load
  // access the local's stack slot themselves.
  UNUSED(load);
}
880
881void LocationsBuilderX86::VisitStoreLocal(HStoreLocal* store) {
882  LocationSummary* locations =
883      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
884  switch (store->InputAt(1)->GetType()) {
885    case Primitive::kPrimBoolean:
886    case Primitive::kPrimByte:
887    case Primitive::kPrimChar:
888    case Primitive::kPrimShort:
889    case Primitive::kPrimInt:
890    case Primitive::kPrimNot:
891    case Primitive::kPrimFloat:
892      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
893      break;
894
895    case Primitive::kPrimLong:
896    case Primitive::kPrimDouble:
897      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
898      break;
899
900    default:
901      LOG(FATAL) << "Unknown local type " << store->InputAt(1)->GetType();
902  }
903  store->SetLocations(locations);
904}
905
void InstructionCodeGeneratorX86::VisitStoreLocal(HStoreLocal* store) {
  // No code: the location constraints in LocationsBuilderX86::VisitStoreLocal
  // already place the value in the local's stack slot.
  UNUSED(store);
}
909
910void LocationsBuilderX86::VisitCondition(HCondition* comp) {
911  LocationSummary* locations =
912      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
913  locations->SetInAt(0, Location::RequiresRegister());
914  locations->SetInAt(1, Location::Any());
915  if (comp->NeedsMaterialization()) {
916    locations->SetOut(Location::RequiresRegister());
917  }
918}
919
920void InstructionCodeGeneratorX86::VisitCondition(HCondition* comp) {
921  if (comp->NeedsMaterialization()) {
922    LocationSummary* locations = comp->GetLocations();
923    Register reg = locations->Out().As<Register>();
924    // Clear register: setcc only sets the low byte.
925    __ xorl(reg, reg);
926    if (locations->InAt(1).IsRegister()) {
927      __ cmpl(locations->InAt(0).As<Register>(),
928              locations->InAt(1).As<Register>());
929    } else if (locations->InAt(1).IsConstant()) {
930      HConstant* instruction = locations->InAt(1).GetConstant();
931      Immediate imm(instruction->AsIntConstant()->GetValue());
932      __ cmpl(locations->InAt(0).As<Register>(), imm);
933    } else {
934      __ cmpl(locations->InAt(0).As<Register>(),
935              Address(ESP, locations->InAt(1).GetStackIndex()));
936    }
937    __ setb(X86Condition(comp->GetCondition()), reg);
938  }
939}
940
// Equality shares the generic HCondition location setup.
void LocationsBuilderX86::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
944
// Equality shares the generic HCondition code generation.
void InstructionCodeGeneratorX86::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
948
// Inequality shares the generic HCondition location setup.
void LocationsBuilderX86::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
952
// Inequality shares the generic HCondition code generation.
void InstructionCodeGeneratorX86::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
956
// Less-than shares the generic HCondition location setup.
void LocationsBuilderX86::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
960
// Less-than shares the generic HCondition code generation.
void InstructionCodeGeneratorX86::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
964
// Less-than-or-equal shares the generic HCondition location setup.
void LocationsBuilderX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
968
// Less-than-or-equal shares the generic HCondition code generation.
void InstructionCodeGeneratorX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
972
// Greater-than shares the generic HCondition location setup.
void LocationsBuilderX86::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
976
// Greater-than shares the generic HCondition code generation.
void InstructionCodeGeneratorX86::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
980
// Greater-than-or-equal shares the generic HCondition location setup.
void LocationsBuilderX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
984
// Greater-than-or-equal shares the generic HCondition code generation.
void InstructionCodeGeneratorX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
988
989void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
990  LocationSummary* locations =
991      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
992  locations->SetOut(Location::ConstantLocation(constant));
993}
994
void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
999
1000void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
1001  LocationSummary* locations =
1002      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1003  locations->SetOut(Location::ConstantLocation(constant));
1004}
1005
void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1010
1011void LocationsBuilderX86::VisitFloatConstant(HFloatConstant* constant) {
1012  LocationSummary* locations =
1013      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1014  locations->SetOut(Location::ConstantLocation(constant));
1015}
1016
void InstructionCodeGeneratorX86::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1021
1022void LocationsBuilderX86::VisitDoubleConstant(HDoubleConstant* constant) {
1023  LocationSummary* locations =
1024      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
1025  locations->SetOut(Location::ConstantLocation(constant));
1026}
1027
void InstructionCodeGeneratorX86::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}
1032
// A void return carries no value and needs no locations.
void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
1036
void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret) {
  UNUSED(ret);
  // Tear down the frame and return; there is no value to pass back.
  codegen_->GenerateFrameExit();
  __ ret();
}
1042
1043void LocationsBuilderX86::VisitReturn(HReturn* ret) {
1044  LocationSummary* locations =
1045      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
1046  switch (ret->InputAt(0)->GetType()) {
1047    case Primitive::kPrimBoolean:
1048    case Primitive::kPrimByte:
1049    case Primitive::kPrimChar:
1050    case Primitive::kPrimShort:
1051    case Primitive::kPrimInt:
1052    case Primitive::kPrimNot:
1053      locations->SetInAt(0, Location::RegisterLocation(EAX));
1054      break;
1055
1056    case Primitive::kPrimLong:
1057      locations->SetInAt(
1058          0, Location::RegisterPairLocation(EAX, EDX));
1059      break;
1060
1061    case Primitive::kPrimFloat:
1062    case Primitive::kPrimDouble:
1063      locations->SetInAt(
1064          0, Location::FpuRegisterLocation(XMM0));
1065      break;
1066
1067    default:
1068      LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
1069  }
1070}
1071
// Emits the return sequence. The value is already in the conventional return
// register(s) (enforced by LocationsBuilderX86::VisitReturn); the debug-only
// switch merely double-checks that fact before emitting the frame exit.
void InstructionCodeGeneratorX86::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        DCHECK_EQ(ret->GetLocations()->InAt(0).As<Register>(), EAX);
        break;

      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairLow<Register>(), EAX);
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairHigh<Register>(), EDX);
        break;

      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        DCHECK_EQ(ret->GetLocations()->InAt(0).As<XmmRegister>(), XMM0);
        break;

      default:
        LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
  __ ret();
}
1101
// Static invokes use the shared invoke location setup (see HandleInvoke).
void LocationsBuilderX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  HandleInvoke(invoke);
}
1105
// Generates a static call by resolving the callee through the current
// method's dex cache and calling its quick-compiled entry point.
void InstructionCodeGeneratorX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  // EAX temp reserved by HandleInvoke; holds the callee during resolution.
  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  codegen_->LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ movl(temp, Address(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()));
  // temp = temp[index_in_cache]
  __ movl(temp, Address(temp, CodeGenerator::GetCacheOffset(invoke->GetIndexInDexCache())));
  // (temp + offset_of_quick_compiled_code)()
  __ call(Address(
      temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86WordSize).Int32Value()));

  // Record the pc of the call site against its dex pc for the runtime.
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1129
// Virtual invokes use the shared invoke location setup (see HandleInvoke).
void LocationsBuilderX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  HandleInvoke(invoke);
}
1133
1134void LocationsBuilderX86::HandleInvoke(HInvoke* invoke) {
1135  LocationSummary* locations =
1136      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1137  locations->AddTemp(Location::RegisterLocation(EAX));
1138
1139  InvokeDexCallingConventionVisitor calling_convention_visitor;
1140  for (size_t i = 0; i < invoke->InputCount(); i++) {
1141    HInstruction* input = invoke->InputAt(i);
1142    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1143  }
1144
1145  switch (invoke->GetType()) {
1146    case Primitive::kPrimBoolean:
1147    case Primitive::kPrimByte:
1148    case Primitive::kPrimChar:
1149    case Primitive::kPrimShort:
1150    case Primitive::kPrimInt:
1151    case Primitive::kPrimNot:
1152      locations->SetOut(Location::RegisterLocation(EAX));
1153      break;
1154
1155    case Primitive::kPrimLong:
1156      locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
1157      break;
1158
1159    case Primitive::kPrimVoid:
1160      break;
1161
1162    case Primitive::kPrimDouble:
1163    case Primitive::kPrimFloat:
1164      locations->SetOut(Location::FpuRegisterLocation(XMM0));
1165      break;
1166  }
1167
1168  invoke->SetLocations(locations);
1169}
1170
// Generates a virtual call: load the receiver's class, fetch the target
// method from the class's embedded vtable, and call its quick entry point.
void InstructionCodeGeneratorX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(ESP, receiver.GetStackIndex()));
    __ movl(temp, Address(temp, class_offset));
  } else {
    __ movl(temp, Address(receiver.As<Register>(), class_offset));
  }
  // temp = temp->GetMethodAt(method_offset);
  __ movl(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(
      temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(kX86WordSize).Int32Value()));

  // Record the pc of the call site against its dex pc for the runtime.
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1194
void LocationsBuilderX86::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument: a temp in XMM0 that the code generator fills
  // with the interface method's dex method index.
  invoke->GetLocations()->AddTemp(Location::FpuRegisterLocation(XMM0));
}
1200
// Generates an interface call: pass the dex method index as a hidden
// argument in XMM0, look up the target in the receiver class's embedded
// IMT (modulo table size), and call its quick entry point.
void InstructionCodeGeneratorX86::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Set the hidden argument.
  __ movl(temp, Immediate(invoke->GetDexMethodIndex()));
  __ movd(invoke->GetLocations()->GetTemp(1).As<XmmRegister>(), temp);

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(ESP, receiver.GetStackIndex()));
    __ movl(temp, Address(temp, class_offset));
  } else {
    __ movl(temp, Address(receiver.As<Register>(), class_offset));
  }
  // temp = temp->GetImtEntryAt(method_offset);
  __ movl(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kX86WordSize).Int32Value()));

  // Record the pc of the call site against its dex pc for the runtime.
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1230
1231void LocationsBuilderX86::VisitNeg(HNeg* neg) {
1232  LocationSummary* locations =
1233      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
1234  switch (neg->GetResultType()) {
1235    case Primitive::kPrimInt:
1236    case Primitive::kPrimLong:
1237      locations->SetInAt(0, Location::RequiresRegister());
1238      locations->SetOut(Location::SameAsFirstInput());
1239      break;
1240
1241    case Primitive::kPrimFloat:
1242    case Primitive::kPrimDouble:
1243      locations->SetInAt(0, Location::RequiresFpuRegister());
1244      // Output overlaps as we need a fresh (zero-initialized)
1245      // register to perform subtraction from zero.
1246      locations->SetOut(Location::RequiresFpuRegister());
1247      break;
1248
1249    default:
1250      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
1251  }
1252}
1253
// Generates arithmetic negation. Integral negation is done in place;
// floating-point negation is computed as (0 - in) into a fresh register.
void InstructionCodeGeneratorX86::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      DCHECK(in.Equals(out));
      __ negl(out.As<Register>());
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegisterPair());
      DCHECK(in.Equals(out));
      __ negl(out.AsRegisterPairLow<Register>());
      // Long negation is subtraction from zero. Negating the low 32 bits
      // sets the carry flag (CF) when they are non-zero, i.e. exactly when
      // a borrow out of the low word occurs. Propagate that borrow by
      // adding the carry into the high word before negating it:
      //   -(hi:lo) == -(hi + borrow) : -(lo)
      __ adcl(out.AsRegisterPairHigh<Register>(), Immediate(0));
      __ negl(out.AsRegisterPairHigh<Register>());
      break;

    case Primitive::kPrimFloat:
      DCHECK(!in.Equals(out));
      // out = 0
      __ xorps(out.As<XmmRegister>(), out.As<XmmRegister>());
      // out = out - in
      __ subss(out.As<XmmRegister>(), in.As<XmmRegister>());
      break;

    case Primitive::kPrimDouble:
      DCHECK(!in.Equals(out));
      // out = 0
      __ xorpd(out.As<XmmRegister>(), out.As<XmmRegister>());
      // out = out - in
      __ subsd(out.As<XmmRegister>(), in.As<XmmRegister>());
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1298
// Location constraints for primitive type conversions, keyed first by result
// type, then by input type. Unimplemented/impossible pairs LOG(FATAL).
// int-to-long pins EAX in / EAX:EDX out because code generation uses cdq.
void LocationsBuilderX86::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, LocationSummary::kNoCall);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat:
        case Primitive::kPrimDouble:
          LOG(FATAL) << "Type conversion from " << input_type
                     << " to " << result_type << " not yet implemented";
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          // EAX in, EAX:EDX out: code generation sign-extends with cdq.
          locations->SetInAt(0, Location::RegisterLocation(EAX));
          locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
          break;

        case Primitive::kPrimFloat:
        case Primitive::kPrimDouble:
          LOG(FATAL) << "Type conversion from " << input_type << " to "
                     << result_type << " not yet implemented";
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-char' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-float' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
        case Primitive::kPrimDouble:
          LOG(FATAL) << "Type conversion from " << input_type
                     << " to " << result_type << " not yet implemented";
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-double' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
        case Primitive::kPrimFloat:
          LOG(FATAL) << "Type conversion from " << input_type
                     << " to " << result_type << " not yet implemented";
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1448
// Code generation for primitive type conversions. Narrowing int conversions
// use movsx/movzx (or fold constants); long-to-int takes the low word;
// int-to-long sign-extends via cdq; int-to-float/double use cvtsi2ss/sd.
void InstructionCodeGeneratorX86::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          if (in.IsRegister()) {
            // NOTE(review): movsxb needs a byte-addressable register
            // (EAX..EBX), but the input location is Location::Any();
            // presumably the register allocator guarantees this — verify.
            __ movsxb(out.As<Register>(), in.As<ByteRegister>());
          } else if (in.IsStackSlot()) {
            __ movsxb(out.As<Register>(), Address(ESP, in.GetStackIndex()));
          } else {
            // Constant input: fold the truncation at compile time.
            DCHECK(in.GetConstant()->IsIntConstant());
            int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
            __ movl(out.As<Register>(), Immediate(static_cast<int8_t>(value)));
          }
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          if (in.IsRegister()) {
            __ movsxw(out.As<Register>(), in.As<Register>());
          } else if (in.IsStackSlot()) {
            __ movsxw(out.As<Register>(), Address(ESP, in.GetStackIndex()));
          } else {
            // Constant input: fold the truncation at compile time.
            DCHECK(in.GetConstant()->IsIntConstant());
            int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
            __ movl(out.As<Register>(), Immediate(static_cast<int16_t>(value)));
          }
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          // Only the low 32 bits of the long are kept.
          if (in.IsRegisterPair()) {
            __ movl(out.As<Register>(), in.AsRegisterPairLow<Register>());
          } else if (in.IsDoubleStackSlot()) {
            __ movl(out.As<Register>(), Address(ESP, in.GetStackIndex()));
          } else {
            DCHECK(in.IsConstant());
            DCHECK(in.GetConstant()->IsLongConstant());
            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
            __ movl(out.As<Register>(), Immediate(static_cast<int32_t>(value)));
          }
          break;

        case Primitive::kPrimFloat:
        case Primitive::kPrimDouble:
          LOG(FATAL) << "Type conversion from " << input_type
                     << " to " << result_type << " not yet implemented";
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          // cdq sign-extends EAX into EDX:EAX, hence the fixed registers.
          DCHECK_EQ(out.AsRegisterPairLow<Register>(), EAX);
          DCHECK_EQ(out.AsRegisterPairHigh<Register>(), EDX);
          DCHECK_EQ(in.As<Register>(), EAX);
          __ cdq();
          break;

        case Primitive::kPrimFloat:
        case Primitive::kPrimDouble:
          LOG(FATAL) << "Type conversion from " << input_type << " to "
                     << result_type << " not yet implemented";
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-char' instruction (zero-extension).
          if (in.IsRegister()) {
            __ movzxw(out.As<Register>(), in.As<Register>());
          } else if (in.IsStackSlot()) {
            __ movzxw(out.As<Register>(), Address(ESP, in.GetStackIndex()));
          } else {
            // Constant input: fold the truncation at compile time.
            DCHECK(in.GetConstant()->IsIntConstant());
            int32_t value = in.GetConstant()->AsIntConstant()->GetValue();
            __ movl(out.As<Register>(), Immediate(static_cast<uint16_t>(value)));
          }
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
          // Processing a Dex `int-to-float' instruction.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          __ cvtsi2ss(out.As<XmmRegister>(), in.As<Register>());
          break;

        case Primitive::kPrimLong:
        case Primitive::kPrimDouble:
          LOG(FATAL) << "Type conversion from " << input_type
                     << " to " << result_type << " not yet implemented";
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
          // Processing a Dex `int-to-double' instruction.
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          __ cvtsi2sd(out.As<XmmRegister>(), in.As<Register>());
          break;

        case Primitive::kPrimLong:
        case Primitive::kPrimFloat:
          LOG(FATAL) << "Type conversion from " << input_type
                     << " to " << result_type << " not yet implemented";
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}
1628
1629void LocationsBuilderX86::VisitAdd(HAdd* add) {
1630  LocationSummary* locations =
1631      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1632  switch (add->GetResultType()) {
1633    case Primitive::kPrimInt:
1634    case Primitive::kPrimLong: {
1635      locations->SetInAt(0, Location::RequiresRegister());
1636      locations->SetInAt(1, Location::Any());
1637      locations->SetOut(Location::SameAsFirstInput());
1638      break;
1639    }
1640
1641    case Primitive::kPrimFloat:
1642    case Primitive::kPrimDouble: {
1643      locations->SetInAt(0, Location::RequiresFpuRegister());
1644      locations->SetInAt(1, Location::Any());
1645      locations->SetOut(Location::SameAsFirstInput());
1646      break;
1647    }
1648
1649    default:
1650      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1651      break;
1652  }
1653}
1654
// Generates code for an addition. The destination aliases the first input
// (enforced by the DCHECK below), matching the two-operand x86 instruction
// forms.
void InstructionCodeGeneratorX86::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      // 32-bit add: second operand may be a register, an immediate or a
      // stack slot.
      if (second.IsRegister()) {
        __ addl(first.As<Register>(), second.As<Register>());
      } else if (second.IsConstant()) {
        __ addl(first.As<Register>(), Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ addl(first.As<Register>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit add: add the low words, then the high words with
      // add-with-carry.
      if (second.IsRegisterPair()) {
        __ addl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
        __ adcl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
      } else {
        __ addl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
        __ adcl(first.AsRegisterPairHigh<Register>(),
                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // Scalar single-precision add; the operand may also come from the stack.
      if (second.IsFpuRegister()) {
        __ addss(first.As<XmmRegister>(), second.As<XmmRegister>());
      } else {
        __ addss(first.As<XmmRegister>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      // Scalar double-precision add.
      if (second.IsFpuRegister()) {
        __ addsd(first.As<XmmRegister>(), second.As<XmmRegister>());
      } else {
        __ addsd(first.As<XmmRegister>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
1706
1707void LocationsBuilderX86::VisitSub(HSub* sub) {
1708  LocationSummary* locations =
1709      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1710  switch (sub->GetResultType()) {
1711    case Primitive::kPrimInt:
1712    case Primitive::kPrimLong: {
1713      locations->SetInAt(0, Location::RequiresRegister());
1714      locations->SetInAt(1, Location::Any());
1715      locations->SetOut(Location::SameAsFirstInput());
1716      break;
1717    }
1718    case Primitive::kPrimFloat:
1719    case Primitive::kPrimDouble: {
1720      locations->SetInAt(0, Location::RequiresFpuRegister());
1721      locations->SetInAt(1, Location::RequiresFpuRegister());
1722      locations->SetOut(Location::SameAsFirstInput());
1723      break;
1724    }
1725
1726    default:
1727      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1728  }
1729}
1730
// Generates code for a subtraction. The destination aliases the first input,
// matching the two-operand x86 instruction forms.
void InstructionCodeGeneratorX86::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      // 32-bit subtract: second operand may be a register, an immediate or a
      // stack slot.
      if (second.IsRegister()) {
        __ subl(first.As<Register>(), second.As<Register>());
      } else if (second.IsConstant()) {
        __ subl(first.As<Register>(), Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
      } else {
        __ subl(first.As<Register>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit subtract: low words with subl, then high words with
      // subtract-with-borrow.
      if (second.IsRegisterPair()) {
        __ subl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
        __ sbbl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
      } else {
        __ subl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
        __ sbbl(first.AsRegisterPairHigh<Register>(),
                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
      }
      break;
    }

    case Primitive::kPrimFloat: {
      // Register operands only (see the locations builder).
      __ subss(first.As<XmmRegister>(), second.As<XmmRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ subsd(first.As<XmmRegister>(), second.As<XmmRegister>());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
1774
// Builds the location constraints for a multiplication. Integer results must
// alias the first input (two-operand imul); the long case additionally
// reserves EAX/EDX for the 32x32->64 bit mull step used by the code
// generator.
void LocationsBuilderX86::VisitMul(HMul* mul) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      // TODO: Currently this handles only stack operands:
      // - we don't have enough registers because we currently use Quick ABI.
      // - by the time we have a working register allocator we will probably change the ABI
      // and fix the above.
      // - we don't have a way yet to request operands on stack but the base line compiler
      // will leave the operands on the stack with Any().
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      // Needed for imul on 32bits with 64bits output.
      locations->AddTemp(Location::RegisterLocation(EAX));
      locations->AddTemp(Location::RegisterLocation(EDX));
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      // Floating-point multiply only supports register operands.
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
1811
// Generates code for a multiplication. The destination aliases the first
// input. For longs, a 64x64->64 bit multiply is synthesized from 32-bit
// imull/mull steps, clobbering the EAX/EDX temporaries reserved by the
// locations builder.
void InstructionCodeGeneratorX86::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        __ imull(first.As<Register>(), second.As<Register>());
      } else if (second.IsConstant()) {
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
        __ imull(first.As<Register>(), imm);
      } else {
        DCHECK(second.IsStackSlot());
        __ imull(first.As<Register>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      // See the TODO in the locations builder: the second operand is always
      // expected on the stack here.
      DCHECK(second.IsDoubleStackSlot());

      Register in1_hi = first.AsRegisterPairHigh<Register>();
      Register in1_lo = first.AsRegisterPairLow<Register>();
      Address in2_hi(ESP, second.GetHighStackIndex(kX86WordSize));
      Address in2_lo(ESP, second.GetStackIndex());
      Register eax = locations->GetTemp(0).As<Register>();
      Register edx = locations->GetTemp(1).As<Register>();

      DCHECK_EQ(EAX, eax);
      DCHECK_EQ(EDX, edx);

      // input: in1 - 64 bits, in2 - 64 bits
      // output: in1
      // formula: in1.hi : in1.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
      // parts: in1.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: in1.lo = (in1.lo * in2.lo)[31:0]

      __ movl(eax, in2_hi);
      // eax <- in1.lo * in2.hi
      __ imull(eax, in1_lo);
      // in1.hi <- in1.hi * in2.lo
      __ imull(in1_hi, in2_lo);
      // in1.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ addl(in1_hi, eax);
      // move in1_lo to eax to prepare for double precision
      __ movl(eax, in1_lo);
      // edx:eax <- in1.lo * in2.lo
      __ mull(in2_lo);
      // in1.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ addl(in1_hi, edx);
      // in1.lo <- (in1.lo * in2.lo)[31:0];
      __ movl(in1_lo, eax);

      break;
    }

    case Primitive::kPrimFloat: {
      __ mulss(first.As<XmmRegister>(), second.As<XmmRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ mulsd(first.As<XmmRegister>(), second.As<XmmRegister>());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
1884
// Shared code generation for integral HDiv and HRem.
// - Int32: uses the cdq/idivl sequence with the dividend in EAX; the
//   quotient lands in EAX and the remainder in EDX. A slow path handles a
//   divisor of -1, since idivl faults on INT_MIN / -1.
// - Int64: delegated to the pLdiv / pLmod quick runtime entrypoints.
void InstructionCodeGeneratorX86::GenerateDivRemIntegral(HBinaryOperation* instruction) {
  DCHECK(instruction->IsDiv() || instruction->IsRem());

  LocationSummary* locations = instruction->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  bool is_div = instruction->IsDiv();

  switch (instruction->GetResultType()) {
    case Primitive::kPrimInt: {
      Register second_reg = second.As<Register>();
      DCHECK_EQ(EAX, first.As<Register>());
      DCHECK_EQ(is_div ? EAX : EDX, out.As<Register>());

      SlowPathCodeX86* slow_path =
          new (GetGraph()->GetArena()) DivRemMinusOneSlowPathX86(out.As<Register>(), is_div);
      codegen_->AddSlowPath(slow_path);

      // 0x80000000/-1 triggers an arithmetic exception!
      // Dividing by -1 is actually negation and -0x80000000 = 0x80000000 so
      // it's safe to just use negl instead of more complex comparisons.

      __ cmpl(second_reg, Immediate(-1));
      __ j(kEqual, slow_path->GetEntryLabel());

      // edx:eax <- sign-extended of eax
      __ cdq();
      // eax = quotient, edx = remainder
      __ idivl(second_reg);

      __ Bind(slow_path->GetExitLabel());
      break;
    }

    case Primitive::kPrimLong: {
      // The locations builder pinned operands and result to the runtime
      // calling convention; the DCHECKs verify that contract.
      InvokeRuntimeCallingConvention calling_convention;
      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
      DCHECK_EQ(EAX, out.AsRegisterPairLow<Register>());
      DCHECK_EQ(EDX, out.AsRegisterPairHigh<Register>());

      if (is_div) {
        __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pLdiv)));
      } else {
        __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pLmod)));
      }
      // Record a stack map at the runtime call site.
      uint32_t dex_pc = is_div
          ? instruction->AsDiv()->GetDexPc()
          : instruction->AsRem()->GetDexPc();
      codegen_->RecordPcInfo(instruction, dex_pc);

      break;
    }

    default:
      LOG(FATAL) << "Unexpected type for GenerateDivRemIntegral " << instruction->GetResultType();
  }
}
1946
1947void LocationsBuilderX86::VisitDiv(HDiv* div) {
1948  LocationSummary::CallKind call_kind = div->GetResultType() == Primitive::kPrimLong
1949      ? LocationSummary::kCall
1950      : LocationSummary::kNoCall;
1951  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);
1952
1953  switch (div->GetResultType()) {
1954    case Primitive::kPrimInt: {
1955      locations->SetInAt(0, Location::RegisterLocation(EAX));
1956      locations->SetInAt(1, Location::RequiresRegister());
1957      locations->SetOut(Location::SameAsFirstInput());
1958      // Intel uses edx:eax as the dividend.
1959      locations->AddTemp(Location::RegisterLocation(EDX));
1960      break;
1961    }
1962    case Primitive::kPrimLong: {
1963      InvokeRuntimeCallingConvention calling_convention;
1964      locations->SetInAt(0, Location::RegisterPairLocation(
1965          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
1966      locations->SetInAt(1, Location::RegisterPairLocation(
1967          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
1968      // Runtime helper puts the result in EAX, EDX.
1969      locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
1970      break;
1971    }
1972    case Primitive::kPrimFloat:
1973    case Primitive::kPrimDouble: {
1974      locations->SetInAt(0, Location::RequiresFpuRegister());
1975      locations->SetInAt(1, Location::RequiresFpuRegister());
1976      locations->SetOut(Location::SameAsFirstInput());
1977      break;
1978    }
1979
1980    default:
1981      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1982  }
1983}
1984
1985void InstructionCodeGeneratorX86::VisitDiv(HDiv* div) {
1986  LocationSummary* locations = div->GetLocations();
1987  Location out = locations->Out();
1988  Location first = locations->InAt(0);
1989  Location second = locations->InAt(1);
1990
1991  switch (div->GetResultType()) {
1992    case Primitive::kPrimInt:
1993    case Primitive::kPrimLong: {
1994      GenerateDivRemIntegral(div);
1995      break;
1996    }
1997
1998    case Primitive::kPrimFloat: {
1999      DCHECK(first.Equals(out));
2000      __ divss(first.As<XmmRegister>(), second.As<XmmRegister>());
2001      break;
2002    }
2003
2004    case Primitive::kPrimDouble: {
2005      DCHECK(first.Equals(out));
2006      __ divsd(first.As<XmmRegister>(), second.As<XmmRegister>());
2007      break;
2008    }
2009
2010    default:
2011      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
2012  }
2013}
2014
2015void LocationsBuilderX86::VisitRem(HRem* rem) {
2016  LocationSummary::CallKind call_kind = rem->GetResultType() == Primitive::kPrimLong
2017      ? LocationSummary::kCall
2018      : LocationSummary::kNoCall;
2019  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);
2020
2021  switch (rem->GetResultType()) {
2022    case Primitive::kPrimInt: {
2023      locations->SetInAt(0, Location::RegisterLocation(EAX));
2024      locations->SetInAt(1, Location::RequiresRegister());
2025      locations->SetOut(Location::RegisterLocation(EDX));
2026      break;
2027    }
2028    case Primitive::kPrimLong: {
2029      InvokeRuntimeCallingConvention calling_convention;
2030      locations->SetInAt(0, Location::RegisterPairLocation(
2031          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
2032      locations->SetInAt(1, Location::RegisterPairLocation(
2033          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
2034      // Runtime helper puts the result in EAX, EDX.
2035      locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
2036      break;
2037    }
2038    case Primitive::kPrimFloat:
2039    case Primitive::kPrimDouble: {
2040      LOG(FATAL) << "Unimplemented rem type " << rem->GetResultType();
2041      break;
2042    }
2043
2044    default:
2045      LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
2046  }
2047}
2048
2049void InstructionCodeGeneratorX86::VisitRem(HRem* rem) {
2050  Primitive::Type type = rem->GetResultType();
2051  switch (type) {
2052    case Primitive::kPrimInt:
2053    case Primitive::kPrimLong: {
2054      GenerateDivRemIntegral(rem);
2055      break;
2056    }
2057    case Primitive::kPrimFloat:
2058    case Primitive::kPrimDouble: {
2059      LOG(FATAL) << "Unimplemented rem type " << type;
2060      break;
2061    }
2062    default:
2063      LOG(FATAL) << "Unexpected rem type " << type;
2064  }
2065}
2066
2067void LocationsBuilderX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2068  LocationSummary* locations =
2069      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2070  switch (instruction->GetType()) {
2071    case Primitive::kPrimInt: {
2072      locations->SetInAt(0, Location::Any());
2073      break;
2074    }
2075    case Primitive::kPrimLong: {
2076      locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
2077      if (!instruction->IsConstant()) {
2078        locations->AddTemp(Location::RequiresRegister());
2079      }
2080      break;
2081    }
2082    default:
2083      LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
2084  }
2085  if (instruction->HasUses()) {
2086    locations->SetOut(Location::SameAsFirstInput());
2087  }
2088}
2089
2090void InstructionCodeGeneratorX86::VisitDivZeroCheck(HDivZeroCheck* instruction) {
2091  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathX86(instruction);
2092  codegen_->AddSlowPath(slow_path);
2093
2094  LocationSummary* locations = instruction->GetLocations();
2095  Location value = locations->InAt(0);
2096
2097  switch (instruction->GetType()) {
2098    case Primitive::kPrimInt: {
2099      if (value.IsRegister()) {
2100        __ testl(value.As<Register>(), value.As<Register>());
2101        __ j(kEqual, slow_path->GetEntryLabel());
2102      } else if (value.IsStackSlot()) {
2103        __ cmpl(Address(ESP, value.GetStackIndex()), Immediate(0));
2104        __ j(kEqual, slow_path->GetEntryLabel());
2105      } else {
2106        DCHECK(value.IsConstant()) << value;
2107        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
2108        __ jmp(slow_path->GetEntryLabel());
2109        }
2110      }
2111      break;
2112    }
2113    case Primitive::kPrimLong: {
2114      if (value.IsRegisterPair()) {
2115        Register temp = locations->GetTemp(0).As<Register>();
2116        __ movl(temp, value.AsRegisterPairLow<Register>());
2117        __ orl(temp, value.AsRegisterPairHigh<Register>());
2118        __ j(kEqual, slow_path->GetEntryLabel());
2119      } else {
2120        DCHECK(value.IsConstant()) << value;
2121        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
2122          __ jmp(slow_path->GetEntryLabel());
2123        }
2124      }
2125      break;
2126    }
2127    default:
2128      LOG(FATAL) << "Unexpected type for HDivZeroCheck" << instruction->GetType();
2129  }
2130}
2131
2132void LocationsBuilderX86::HandleShift(HBinaryOperation* op) {
2133  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());
2134
2135  LocationSummary* locations =
2136      new (GetGraph()->GetArena()) LocationSummary(op, LocationSummary::kNoCall);
2137
2138  switch (op->GetResultType()) {
2139    case Primitive::kPrimInt: {
2140      locations->SetInAt(0, Location::RequiresRegister());
2141      // The shift count needs to be in CL.
2142      locations->SetInAt(1, Location::ByteRegisterOrConstant(ECX, op->InputAt(1)));
2143      locations->SetOut(Location::SameAsFirstInput());
2144      break;
2145    }
2146    case Primitive::kPrimLong: {
2147      locations->SetInAt(0, Location::RequiresRegister());
2148      // The shift count needs to be in CL.
2149      locations->SetInAt(1, Location::RegisterLocation(ECX));
2150      locations->SetOut(Location::SameAsFirstInput());
2151      break;
2152    }
2153    default:
2154      LOG(FATAL) << "Unexpected op type " << op->GetResultType();
2155  }
2156}
2157
// Shared code generation for shl/shr/ushr. The first input doubles as the
// destination; a variable shift count is expected in ECX (CL).
void InstructionCodeGeneratorX86::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  DCHECK(first.Equals(locations->Out()));

  switch (op->GetResultType()) {
    case Primitive::kPrimInt: {
      Register first_reg = first.As<Register>();
      if (second.IsRegister()) {
        // Variable count: shift by CL.
        Register second_reg = second.As<Register>();
        DCHECK_EQ(ECX, second_reg);
        if (op->IsShl()) {
          __ shll(first_reg, second_reg);
        } else if (op->IsShr()) {
          __ sarl(first_reg, second_reg);
        } else {
          __ shrl(first_reg, second_reg);
        }
      } else {
        // Constant count: use the immediate forms.
        Immediate imm(second.GetConstant()->AsIntConstant()->GetValue());
        if (op->IsShl()) {
          __ shll(first_reg, imm);
        } else if (op->IsShr()) {
          __ sarl(first_reg, imm);
        } else {
          __ shrl(first_reg, imm);
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      // 64-bit shifts are expanded by the GenerateShlLong/ShrLong/UShrLong
      // helpers.
      Register second_reg = second.As<Register>();
      DCHECK_EQ(ECX, second_reg);
      if (op->IsShl()) {
        GenerateShlLong(first, second_reg);
      } else if (op->IsShr()) {
        GenerateShrLong(first, second_reg);
      } else {
        GenerateUShrLong(first, second_reg);
      }
      break;
    }
    default:
      LOG(FATAL) << "Unexpected op type " << op->GetResultType();
  }
}
2207
// Emits a 64-bit left shift of the register pair `loc` by CL (`shifter`).
// x86 shift instructions only use the low five bits of the count, so after
// the shld/shll pair the result is fixed up when bit 5 of the count is set
// (counts in [32, 63]): the low word moves into the high word and the low
// word becomes zero.
void InstructionCodeGeneratorX86::GenerateShlLong(const Location& loc, Register shifter) {
  Label done;
  // high = (high << count) | (low >> (32 - count)); then low <<= count.
  __ shld(loc.AsRegisterPairHigh<Register>(), loc.AsRegisterPairLow<Register>(), shifter);
  __ shll(loc.AsRegisterPairLow<Register>(), shifter);
  // Fix-up for counts >= 32.
  __ testl(shifter, Immediate(32));
  __ j(kEqual, &done);
  __ movl(loc.AsRegisterPairHigh<Register>(), loc.AsRegisterPairLow<Register>());
  __ movl(loc.AsRegisterPairLow<Register>(), Immediate(0));
  __ Bind(&done);
}
2218
// Emits a 64-bit arithmetic right shift of the register pair `loc` by CL.
// shrd/sarl only honor the low five bits of the count, so when bit 5 is set
// (counts in [32, 63]) the result is fixed up: the high word moves into the
// low word and the high word is filled with the sign via sarl by 31.
void InstructionCodeGeneratorX86::GenerateShrLong(const Location& loc, Register shifter) {
  Label done;
  // low = (high << (32 - count)) | (low >> count); then high >>= count (signed).
  __ shrd(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>(), shifter);
  __ sarl(loc.AsRegisterPairHigh<Register>(), shifter);
  // Fix-up for counts >= 32.
  __ testl(shifter, Immediate(32));
  __ j(kEqual, &done);
  __ movl(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>());
  __ sarl(loc.AsRegisterPairHigh<Register>(), Immediate(31));
  __ Bind(&done);
}
2229
// Emits a 64-bit logical right shift of the register pair `loc` by CL.
// Same structure as GenerateShrLong, but for counts >= 32 the high word is
// zero-filled instead of sign-filled.
void InstructionCodeGeneratorX86::GenerateUShrLong(const Location& loc, Register shifter) {
  Label done;
  // low = (high << (32 - count)) | (low >> count); then high >>= count (unsigned).
  __ shrd(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>(), shifter);
  __ shrl(loc.AsRegisterPairHigh<Register>(), shifter);
  // Fix-up for counts >= 32.
  __ testl(shifter, Immediate(32));
  __ j(kEqual, &done);
  __ movl(loc.AsRegisterPairLow<Register>(), loc.AsRegisterPairHigh<Register>());
  __ movl(loc.AsRegisterPairHigh<Register>(), Immediate(0));
  __ Bind(&done);
}
2240
// The three shift flavors (shl, shr, ushr) all delegate to HandleShift for
// both location building and code generation.

void LocationsBuilderX86::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorX86::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderX86::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorX86::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderX86::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorX86::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}
2264
2265void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
2266  LocationSummary* locations =
2267      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2268  locations->SetOut(Location::RegisterLocation(EAX));
2269  InvokeRuntimeCallingConvention calling_convention;
2270  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2271  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2272}
2273
// Allocates a new instance via the pAllocObjectWithAccessCheck runtime
// entrypoint: type index in the first argument register, current ArtMethod
// in the second; the result is returned in EAX (the declared output).
void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));

  // The entrypoint address is loaded from a fixed offset off %fs.
  __ fs()->call(
      Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocObjectWithAccessCheck)));

  // Record the PC so the runtime can attribute this call site.
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}
2285
2286void LocationsBuilderX86::VisitNewArray(HNewArray* instruction) {
2287  LocationSummary* locations =
2288      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
2289  locations->SetOut(Location::RegisterLocation(EAX));
2290  InvokeRuntimeCallingConvention calling_convention;
2291  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2292  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2293  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2294}
2295
// Allocates a new array via the pAllocArrayWithAccessCheck runtime
// entrypoint: type index in the first argument register, current ArtMethod
// in the second. The instruction's input (presumably the array length —
// confirm against the entrypoint signature) was pinned to the third argument
// register by the locations builder. The result is returned in EAX.
void InstructionCodeGeneratorX86::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));

  // The entrypoint address is loaded from a fixed offset off %fs.
  __ fs()->call(
      Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocArrayWithAccessCheck)));

  // Record the PC so the runtime can attribute this call site.
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}
2307
2308void LocationsBuilderX86::VisitParameterValue(HParameterValue* instruction) {
2309  LocationSummary* locations =
2310      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2311  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
2312  if (location.IsStackSlot()) {
2313    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2314  } else if (location.IsDoubleStackSlot()) {
2315    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
2316  }
2317  locations->SetOut(location);
2318}
2319
// No code is generated for a parameter: it is already at the location
// assigned by the locations builder on method entry.
void InstructionCodeGeneratorX86::VisitParameterValue(HParameterValue* instruction) {
  UNUSED(instruction);
}
2323
2324void LocationsBuilderX86::VisitNot(HNot* not_) {
2325  LocationSummary* locations =
2326      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
2327  locations->SetInAt(0, Location::RequiresRegister());
2328  locations->SetOut(Location::SameAsFirstInput());
2329}
2330
// Generates the in-place not. Booleans are flipped with `xor 1` (assumes the
// value is a canonical 0/1); ints use notl; longs apply notl to both halves
// of the pair.
void InstructionCodeGeneratorX86::VisitNot(HNot* not_) {
  LocationSummary* locations = not_->GetLocations();
  Location in = locations->InAt(0);
  Location out = locations->Out();
  DCHECK(in.Equals(out));
  switch (not_->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
      // Logical not via xor with 1.
      __ xorl(out.As<Register>(), Immediate(1));
      break;

    case Primitive::kPrimInt:
      __ notl(out.As<Register>());
      break;

    case Primitive::kPrimLong:
      // Bitwise not of both 32-bit halves.
      __ notl(out.AsRegisterPairLow<Register>());
      __ notl(out.AsRegisterPairHigh<Register>());
      break;

    default:
      // NOTE(review): the switch dispatches on the input type but the fatal
      // message logs GetResultType() — confirm these always agree for HNot.
      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
  }
}
2354
2355void LocationsBuilderX86::VisitCompare(HCompare* compare) {
2356  LocationSummary* locations =
2357      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
2358  locations->SetInAt(0, Location::RequiresRegister());
2359  locations->SetInAt(1, Location::Any());
2360  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2361}
2362
2363void InstructionCodeGeneratorX86::VisitCompare(HCompare* compare) {
2364  LocationSummary* locations = compare->GetLocations();
2365  switch (compare->InputAt(0)->GetType()) {
2366    case Primitive::kPrimLong: {
2367      Label less, greater, done;
2368      Register output = locations->Out().As<Register>();
2369      Location left = locations->InAt(0);
2370      Location right = locations->InAt(1);
2371      if (right.IsRegister()) {
2372        __ cmpl(left.AsRegisterPairHigh<Register>(), right.AsRegisterPairHigh<Register>());
2373      } else {
2374        DCHECK(right.IsDoubleStackSlot());
2375        __ cmpl(left.AsRegisterPairHigh<Register>(),
2376                Address(ESP, right.GetHighStackIndex(kX86WordSize)));
2377      }
2378      __ j(kLess, &less);  // Signed compare.
2379      __ j(kGreater, &greater);  // Signed compare.
2380      if (right.IsRegisterPair()) {
2381        __ cmpl(left.AsRegisterPairLow<Register>(), right.AsRegisterPairLow<Register>());
2382      } else {
2383        DCHECK(right.IsDoubleStackSlot());
2384        __ cmpl(left.AsRegisterPairLow<Register>(), Address(ESP, right.GetStackIndex()));
2385      }
2386      __ movl(output, Immediate(0));
2387      __ j(kEqual, &done);
2388      __ j(kBelow, &less);  // Unsigned compare.
2389
2390      __ Bind(&greater);
2391      __ movl(output, Immediate(1));
2392      __ jmp(&done);
2393
2394      __ Bind(&less);
2395      __ movl(output, Immediate(-1));
2396
2397      __ Bind(&done);
2398      break;
2399    }
2400    default:
2401      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
2402  }
2403}
2404
2405void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
2406  LocationSummary* locations =
2407      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2408  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
2409    locations->SetInAt(i, Location::Any());
2410  }
2411  locations->SetOut(Location::Any());
2412}
2413
void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction) {
  UNUSED(instruction);
  // Phis never generate code themselves; reaching here is a compiler bug.
  LOG(FATAL) << "Unreachable";
}
2418
// Builds locations for an instance field store. Boolean/byte values must be
// in a byte-addressable register (pinned to EAX here); reference stores that
// need a write barrier get two temporaries, with the card value pinned to
// ECX (also byte-addressable, for the movb in MarkGCCard).
void LocationsBuilderX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  Primitive::Type field_type = instruction->GetFieldType();
  bool needs_write_barrier =
    CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));

  bool is_byte_type = (field_type == Primitive::kPrimBoolean)
      || (field_type == Primitive::kPrimByte);
  // The register allocator does not support multiple
  // inputs that die at entry with one in a specific register.
  if (is_byte_type) {
    // Ensure the value is in a byte register.
    locations->SetInAt(1, Location::RegisterLocation(EAX));
  } else {
    locations->SetInAt(1, Location::RequiresRegister());
  }
  // Temporary registers for the write barrier.
  if (needs_write_barrier) {
    locations->AddTemp(Location::RequiresRegister());
    // Ensure the card is in a byte register.
    locations->AddTemp(Location::RegisterLocation(ECX));
  }
}
2444
// Generates the store for an instance field write. The store width follows
// the field type; reference stores are followed by a GC card mark when
// StoreNeedsWriteBarrier says one is required.
void InstructionCodeGeneratorX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
  Primitive::Type field_type = instruction->GetFieldType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      // 8-bit store; the value was allocated to a byte-addressable register.
      ByteRegister value = locations->InAt(1).As<ByteRegister>();
      __ movb(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      // 16-bit store.
      Register value = locations->InAt(1).As<Register>();
      __ movw(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register value = locations->InAt(1).As<Register>();
      __ movl(Address(obj, offset), value);

      if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
        // Reference store: dirty the GC card covering `obj`.
        Register temp = locations->GetTemp(0).As<Register>();
        Register card = locations->GetTemp(1).As<Register>();
        codegen_->MarkGCCard(temp, card, obj, value);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit store as two 32-bit moves (low word first); not atomic.
      Location value = locations->InAt(1);
      __ movl(Address(obj, offset), value.AsRegisterPairLow<Register>());
      __ movl(Address(obj, kX86WordSize + offset), value.AsRegisterPairHigh<Register>());
      break;
    }

    case Primitive::kPrimFloat: {
      XmmRegister value = locations->InAt(1).As<XmmRegister>();
      __ movss(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimDouble: {
      XmmRegister value = locations->InAt(1).As<XmmRegister>();
      __ movsd(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }
}
2503
// Marks the card covering `object` after a reference store so the GC knows
// the object may hold cross-space pointers. The card write is skipped when
// the stored `value` is null. `card` must be a register with a byte form;
// `temp` is clobbered.
void CodeGeneratorX86::MarkGCCard(Register temp, Register card, Register object, Register value) {
  Label is_null;
  __ testl(value, value);
  __ j(kEqual, &is_null);
  // Load the card table base from the current thread (via fs:).
  __ fs()->movl(card, Address::Absolute(Thread::CardTableOffset<kX86WordSize>().Int32Value()));
  __ movl(temp, object);
  __ shrl(temp, Immediate(gc::accounting::CardTable::kCardShift));
  // Store the low byte of the card-table base as the card value — this
  // presumably relies on the runtime's card-table biasing scheme so that the
  // base's low byte equals the "dirty" marker; confirm against CardTable.
  __ movb(Address(temp, card, TIMES_1, 0),
          X86ManagedRegister::FromCpuRegister(card).AsByteRegister());
  __ Bind(&is_null);
}
2515
2516void LocationsBuilderX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
2517  LocationSummary* locations =
2518      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2519  locations->SetInAt(0, Location::RequiresRegister());
2520  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2521}
2522
// Emits the load for an instance field read. Sub-word types pick the
// extension matching Java semantics: boolean/char zero-extend, byte/short
// sign-extend.
void InstructionCodeGeneratorX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      Register out = locations->Out().As<Register>();
      __ movzxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimByte: {
      Register out = locations->Out().As<Register>();
      __ movsxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimShort: {
      Register out = locations->Out().As<Register>();
      __ movsxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimChar: {
      Register out = locations->Out().As<Register>();
      __ movzxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register out = locations->Out().As<Register>();
      __ movl(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit loads are emitted as two 32-bit loads (not atomic).
      // TODO: support volatile.
      __ movl(locations->Out().AsRegisterPairLow<Register>(), Address(obj, offset));
      __ movl(locations->Out().AsRegisterPairHigh<Register>(), Address(obj, kX86WordSize + offset));
      break;
    }

    case Primitive::kPrimFloat: {
      XmmRegister out = locations->Out().As<XmmRegister>();
      __ movss(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimDouble: {
      XmmRegister out = locations->Out().As<XmmRegister>();
      __ movsd(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2584
2585void LocationsBuilderX86::VisitNullCheck(HNullCheck* instruction) {
2586  LocationSummary* locations =
2587      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2588  locations->SetInAt(0, Location::Any());
2589  if (instruction->HasUses()) {
2590    locations->SetOut(Location::SameAsFirstInput());
2591  }
2592}
2593
// Emits an explicit null check: compare the object against zero and branch
// to a throwing slow path when it is null.
void InstructionCodeGeneratorX86::VisitNullCheck(HNullCheck* instruction) {
  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  if (obj.IsRegister()) {
    __ cmpl(obj.As<Register>(), Immediate(0));
  } else if (obj.IsStackSlot()) {
    __ cmpl(Address(ESP, obj.GetStackIndex()), Immediate(0));
  } else {
    // The only constant that can reach a null check is null itself (the
    // DCHECK below), so the check unconditionally fails.
    DCHECK(obj.IsConstant()) << obj;
    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
    __ jmp(slow_path->GetEntryLabel());
    return;
  }
  __ j(kEqual, slow_path->GetEntryLabel());
}
2613
2614void LocationsBuilderX86::VisitArrayGet(HArrayGet* instruction) {
2615  LocationSummary* locations =
2616      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2617  locations->SetInAt(0, Location::RequiresRegister());
2618  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2619  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2620}
2621
// Emits the load of an array element. A constant index is folded into the
// displacement; otherwise the index register is used with the element-size
// scale. Sub-word element types zero/sign extend per Java semantics.
void InstructionCodeGeneratorX86::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        __ movzxb(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
      } else {
        __ movzxb(out, Address(obj, index.As<Register>(), TIMES_1, data_offset));
      }
      break;
    }

    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        __ movsxb(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
      } else {
        __ movsxb(out, Address(obj, index.As<Register>(), TIMES_1, data_offset));
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        __ movsxw(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
      } else {
        __ movsxw(out, Address(obj, index.As<Register>(), TIMES_2, data_offset));
      }
      break;
    }

    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        __ movzxw(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
      } else {
        __ movzxw(out, Address(obj, index.As<Register>(), TIMES_2, data_offset));
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        __ movl(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
      } else {
        __ movl(out, Address(obj, index.As<Register>(), TIMES_4, data_offset));
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit loads are emitted as two 32-bit loads (not atomic).
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ movl(out.AsRegisterPairLow<Register>(), Address(obj, offset));
        __ movl(out.AsRegisterPairHigh<Register>(), Address(obj, offset + kX86WordSize));
      } else {
        __ movl(out.AsRegisterPairLow<Register>(),
                Address(obj, index.As<Register>(), TIMES_8, data_offset));
        __ movl(out.AsRegisterPairHigh<Register>(),
                Address(obj, index.As<Register>(), TIMES_8, data_offset + kX86WordSize));
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
      UNREACHABLE();
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
2714
2715void LocationsBuilderX86::VisitArraySet(HArraySet* instruction) {
2716  Primitive::Type value_type = instruction->GetComponentType();
2717  bool needs_write_barrier =
2718      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
2719
2720  DCHECK(kFollowsQuickABI);
2721  bool not_enough_registers = needs_write_barrier
2722      && !instruction->GetValue()->IsConstant()
2723      && !instruction->GetIndex()->IsConstant();
2724  bool needs_runtime_call = instruction->NeedsTypeCheck() || not_enough_registers;
2725
2726  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
2727      instruction,
2728      needs_runtime_call ? LocationSummary::kCall : LocationSummary::kNoCall);
2729
2730  if (needs_runtime_call) {
2731    InvokeRuntimeCallingConvention calling_convention;
2732    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
2733    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
2734    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
2735  } else {
2736    bool is_byte_type = (value_type == Primitive::kPrimBoolean)
2737        || (value_type == Primitive::kPrimByte);
2738    // We need the inputs to be different than the output in case of long operation.
2739    // In case of a byte operation, the register allocator does not support multiple
2740    // inputs that die at entry with one in a specific register.
2741    locations->SetInAt(0, Location::RequiresRegister());
2742    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
2743    if (is_byte_type) {
2744      // Ensure the value is in a byte register.
2745      locations->SetInAt(2, Location::ByteRegisterOrConstant(EAX, instruction->InputAt(2)));
2746    } else {
2747      locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)));
2748    }
2749    // Temporary registers for the write barrier.
2750    if (needs_write_barrier) {
2751      locations->AddTemp(Location::RequiresRegister());
2752      // Ensure the card is in a byte register.
2753      locations->AddTemp(Location::RegisterLocation(ECX));
2754    }
2755  }
2756}
2757
2758void InstructionCodeGeneratorX86::VisitArraySet(HArraySet* instruction) {
2759  LocationSummary* locations = instruction->GetLocations();
2760  Register obj = locations->InAt(0).As<Register>();
2761  Location index = locations->InAt(1);
2762  Location value = locations->InAt(2);
2763  Primitive::Type value_type = instruction->GetComponentType();
2764  bool needs_runtime_call = locations->WillCall();
2765  bool needs_write_barrier =
2766      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
2767
2768  switch (value_type) {
2769    case Primitive::kPrimBoolean:
2770    case Primitive::kPrimByte: {
2771      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
2772      if (index.IsConstant()) {
2773        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
2774        if (value.IsRegister()) {
2775          __ movb(Address(obj, offset), value.As<ByteRegister>());
2776        } else {
2777          __ movb(Address(obj, offset),
2778                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2779        }
2780      } else {
2781        if (value.IsRegister()) {
2782          __ movb(Address(obj, index.As<Register>(), TIMES_1, data_offset),
2783                  value.As<ByteRegister>());
2784        } else {
2785          __ movb(Address(obj, index.As<Register>(), TIMES_1, data_offset),
2786                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2787        }
2788      }
2789      break;
2790    }
2791
2792    case Primitive::kPrimShort:
2793    case Primitive::kPrimChar: {
2794      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
2795      if (index.IsConstant()) {
2796        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
2797        if (value.IsRegister()) {
2798          __ movw(Address(obj, offset), value.As<Register>());
2799        } else {
2800          __ movw(Address(obj, offset),
2801                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2802        }
2803      } else {
2804        if (value.IsRegister()) {
2805          __ movw(Address(obj, index.As<Register>(), TIMES_2, data_offset),
2806                  value.As<Register>());
2807        } else {
2808          __ movw(Address(obj, index.As<Register>(), TIMES_2, data_offset),
2809                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2810        }
2811      }
2812      break;
2813    }
2814
2815    case Primitive::kPrimInt:
2816    case Primitive::kPrimNot: {
2817      if (!needs_runtime_call) {
2818        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
2819        if (index.IsConstant()) {
2820          size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
2821          if (value.IsRegister()) {
2822            __ movl(Address(obj, offset), value.As<Register>());
2823          } else {
2824            DCHECK(value.IsConstant()) << value;
2825            __ movl(Address(obj, offset),
2826                    Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2827          }
2828        } else {
2829          DCHECK(index.IsRegister()) << index;
2830          if (value.IsRegister()) {
2831            __ movl(Address(obj, index.As<Register>(), TIMES_4, data_offset),
2832                    value.As<Register>());
2833          } else {
2834            DCHECK(value.IsConstant()) << value;
2835            __ movl(Address(obj, index.As<Register>(), TIMES_4, data_offset),
2836                    Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
2837          }
2838        }
2839
2840        if (needs_write_barrier) {
2841          Register temp = locations->GetTemp(0).As<Register>();
2842          Register card = locations->GetTemp(1).As<Register>();
2843          codegen_->MarkGCCard(temp, card, obj, value.As<Register>());
2844        }
2845      } else {
2846        DCHECK_EQ(value_type, Primitive::kPrimNot);
2847        DCHECK(!codegen_->IsLeafMethod());
2848        __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAputObject)));
2849        codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
2850      }
2851      break;
2852    }
2853
2854    case Primitive::kPrimLong: {
2855      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
2856      if (index.IsConstant()) {
2857        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
2858        if (value.IsRegisterPair()) {
2859          __ movl(Address(obj, offset), value.AsRegisterPairLow<Register>());
2860          __ movl(Address(obj, offset + kX86WordSize), value.AsRegisterPairHigh<Register>());
2861        } else {
2862          DCHECK(value.IsConstant());
2863          int64_t val = value.GetConstant()->AsLongConstant()->GetValue();
2864          __ movl(Address(obj, offset), Immediate(Low32Bits(val)));
2865          __ movl(Address(obj, offset + kX86WordSize), Immediate(High32Bits(val)));
2866        }
2867      } else {
2868        if (value.IsRegisterPair()) {
2869          __ movl(Address(obj, index.As<Register>(), TIMES_8, data_offset),
2870                  value.AsRegisterPairLow<Register>());
2871          __ movl(Address(obj, index.As<Register>(), TIMES_8, data_offset + kX86WordSize),
2872                  value.AsRegisterPairHigh<Register>());
2873        } else {
2874          DCHECK(value.IsConstant());
2875          int64_t val = value.GetConstant()->AsLongConstant()->GetValue();
2876          __ movl(Address(obj, index.As<Register>(), TIMES_8, data_offset),
2877                  Immediate(Low32Bits(val)));
2878          __ movl(Address(obj, index.As<Register>(), TIMES_8, data_offset + kX86WordSize),
2879                  Immediate(High32Bits(val)));
2880        }
2881      }
2882      break;
2883    }
2884
2885    case Primitive::kPrimFloat:
2886    case Primitive::kPrimDouble:
2887      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
2888      UNREACHABLE();
2889    case Primitive::kPrimVoid:
2890      LOG(FATAL) << "Unreachable type " << instruction->GetType();
2891      UNREACHABLE();
2892  }
2893}
2894
2895void LocationsBuilderX86::VisitArrayLength(HArrayLength* instruction) {
2896  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
2897  locations->SetInAt(0, Location::RequiresRegister());
2898  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
2899  instruction->SetLocations(locations);
2900}
2901
2902void InstructionCodeGeneratorX86::VisitArrayLength(HArrayLength* instruction) {
2903  LocationSummary* locations = instruction->GetLocations();
2904  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
2905  Register obj = locations->InAt(0).As<Register>();
2906  Register out = locations->Out().As<Register>();
2907  __ movl(out, Address(obj, offset));
2908}
2909
2910void LocationsBuilderX86::VisitBoundsCheck(HBoundsCheck* instruction) {
2911  LocationSummary* locations =
2912      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
2913  locations->SetInAt(0, Location::RequiresRegister());
2914  locations->SetInAt(1, Location::RequiresRegister());
2915  if (instruction->HasUses()) {
2916    locations->SetOut(Location::SameAsFirstInput());
2917  }
2918}
2919
2920void InstructionCodeGeneratorX86::VisitBoundsCheck(HBoundsCheck* instruction) {
2921  LocationSummary* locations = instruction->GetLocations();
2922  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86(
2923      instruction, locations->InAt(0), locations->InAt(1));
2924  codegen_->AddSlowPath(slow_path);
2925
2926  Register index = locations->InAt(0).As<Register>();
2927  Register length = locations->InAt(1).As<Register>();
2928
2929  __ cmpl(index, length);
2930  __ j(kAboveEqual, slow_path->GetEntryLabel());
2931}
2932
// A temporary carries no location summary of its own.
void LocationsBuilderX86::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}
2936
// Temporaries generate no code of their own.
void InstructionCodeGeneratorX86::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}
2941
// Building locations for a parallel move is unreachable: parallel moves are
// emitted directly through the move resolver (see the codegen visitor
// below).
void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}
2946
// Delegates emission of the whole move group to the parallel move resolver.
void InstructionCodeGeneratorX86::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
2950
// A suspend check needs no registers, only a slow path into the runtime.
void LocationsBuilderX86::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}
2954
// Emits a suspend check, unless another instruction is responsible for it:
// inside a loop the back edge emits it, and in the entry block a directly
// following goto does.
void InstructionCodeGeneratorX86::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}
2968
// Tests the current thread's flags (read through fs:) against zero.
// Without a successor: branch to the slow path when a flag is set, and bind
// the slow path's return label right after the check. With a successor
// (back-edge form): jump to the successor when no flag is set, otherwise
// fall into the slow path via an unconditional jump.
void InstructionCodeGeneratorX86::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathX86* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathX86(instruction, successor);
  codegen_->AddSlowPath(slow_path);
  __ fs()->cmpw(Address::Absolute(
      Thread::ThreadFlagsOffset<kX86WordSize>().Int32Value()), Immediate(0));
  if (successor == nullptr) {
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}
2984
// The move resolver borrows the code generator's assembler.
X86Assembler* ParallelMoveResolverX86::GetAssembler() const {
  return codegen_->GetAssembler();
}
2988
// Copies a 32-bit value between two stack slots through a scratch register.
// If acquiring the scratch register required spilling (a push), the
// ESP-relative offsets are rebiased by one word.
void ParallelMoveResolverX86::MoveMemoryToMemory(int dst, int src) {
  ScratchRegisterScope ensure_scratch(
      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, src + stack_offset));
  __ movl(Address(ESP, dst + stack_offset), static_cast<Register>(ensure_scratch.GetRegister()));
}
2996
2997void ParallelMoveResolverX86::EmitMove(size_t index) {
2998  MoveOperands* move = moves_.Get(index);
2999  Location source = move->GetSource();
3000  Location destination = move->GetDestination();
3001
3002  if (source.IsRegister()) {
3003    if (destination.IsRegister()) {
3004      __ movl(destination.As<Register>(), source.As<Register>());
3005    } else {
3006      DCHECK(destination.IsStackSlot());
3007      __ movl(Address(ESP, destination.GetStackIndex()), source.As<Register>());
3008    }
3009  } else if (source.IsStackSlot()) {
3010    if (destination.IsRegister()) {
3011      __ movl(destination.As<Register>(), Address(ESP, source.GetStackIndex()));
3012    } else {
3013      DCHECK(destination.IsStackSlot());
3014      MoveMemoryToMemory(destination.GetStackIndex(),
3015                         source.GetStackIndex());
3016    }
3017  } else if (source.IsConstant()) {
3018    HIntConstant* instruction = source.GetConstant()->AsIntConstant();
3019    Immediate imm(instruction->AsIntConstant()->GetValue());
3020    if (destination.IsRegister()) {
3021      __ movl(destination.As<Register>(), imm);
3022    } else {
3023      __ movl(Address(ESP, destination.GetStackIndex()), imm);
3024    }
3025  } else {
3026    LOG(FATAL) << "Unimplemented";
3027  }
3028}
3029
// Swaps a register with a stack slot through a scratch register (preferring
// a scratch different from `reg`). If acquiring the scratch required a
// spill (push), the ESP-relative offset is rebiased by one word.
void ParallelMoveResolverX86::Exchange(Register reg, int mem) {
  Register suggested_scratch = reg == EAX ? EBX : EAX;
  ScratchRegisterScope ensure_scratch(
      this, reg, suggested_scratch, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset));
  __ movl(Address(ESP, mem + stack_offset), reg);
  __ movl(reg, static_cast<Register>(ensure_scratch.GetRegister()));
}
3040
// Swaps two stack slots using two scratch registers. Each scratch that had
// to be spilled adds one word to the ESP-relative offsets.
void ParallelMoveResolverX86::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch1(
      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());

  Register suggested_scratch = ensure_scratch1.GetRegister() == EAX ? EBX : EAX;
  ScratchRegisterScope ensure_scratch2(
      this, ensure_scratch1.GetRegister(), suggested_scratch, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch1.IsSpilled() ? kX86WordSize : 0;
  stack_offset += ensure_scratch2.IsSpilled() ? kX86WordSize : 0;
  __ movl(static_cast<Register>(ensure_scratch1.GetRegister()), Address(ESP, mem1 + stack_offset));
  __ movl(static_cast<Register>(ensure_scratch2.GetRegister()), Address(ESP, mem2 + stack_offset));
  __ movl(Address(ESP, mem2 + stack_offset), static_cast<Register>(ensure_scratch1.GetRegister()));
  __ movl(Address(ESP, mem1 + stack_offset), static_cast<Register>(ensure_scratch2.GetRegister()));
}
3056
// Swaps the two locations of one move when resolving a cycle. Supported
// combinations: register<->register, register<->stack slot, and
// stack slot<->stack slot.
void ParallelMoveResolverX86::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    __ xchgl(destination.As<Register>(), source.As<Register>());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange(source.As<Register>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange(destination.As<Register>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange(destination.GetStackIndex(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}
3074
// Saves a scratch register on the native stack so it can be borrowed.
void ParallelMoveResolverX86::SpillScratch(int reg) {
  __ pushl(static_cast<Register>(reg));
}
3078
// Restores a scratch register previously saved by SpillScratch.
void ParallelMoveResolverX86::RestoreScratch(int reg) {
  __ popl(static_cast<Register>(reg));
}
3082
3083void LocationsBuilderX86::VisitLoadClass(HLoadClass* cls) {
3084  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
3085      ? LocationSummary::kCallOnSlowPath
3086      : LocationSummary::kNoCall;
3087  LocationSummary* locations =
3088      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
3089  locations->SetOut(Location::RequiresRegister());
3090}
3091
// Loads the requested class into the output register. The referrer's own
// class is read straight from the current method; other classes go through
// the dex cache of resolved types, with a slow path taken when the entry is
// null (and, if required, for the class initialization check).
void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) {
  Register out = cls->GetLocations()->Out().As<Register>();
  if (cls->IsReferrersClass()) {
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    // out = current method, then out = method->declaring class.
    codegen_->LoadCurrentMethod(out);
    __ movl(out, Address(out, mirror::ArtMethod::DeclaringClassOffset().Int32Value()));
  } else {
    DCHECK(cls->CanCallRuntime());
    // out = current method, then index the dex cache's resolved-types array.
    codegen_->LoadCurrentMethod(out);
    __ movl(out, Address(out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value()));
    __ movl(out, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())));

    SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    // A null cache entry means the class is not resolved yet.
    __ testl(out, out);
    __ j(kEqual, slow_path->GetEntryLabel());
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}
3117
3118void LocationsBuilderX86::VisitClinitCheck(HClinitCheck* check) {
3119  LocationSummary* locations =
3120      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
3121  locations->SetInAt(0, Location::RequiresRegister());
3122  if (check->HasUses()) {
3123    locations->SetOut(Location::SameAsFirstInput());
3124  }
3125}
3126
// Emits an explicit class initialization check; the slow path initializes
// the class when it is not initialized yet.
void InstructionCodeGeneratorX86::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class to not be null.
  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathX86(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path, check->GetLocations()->InAt(0).As<Register>());
}
3134
3135void InstructionCodeGeneratorX86::GenerateClassInitializationCheck(
3136    SlowPathCodeX86* slow_path, Register class_reg) {
3137  __ cmpl(Address(class_reg,  mirror::Class::StatusOffset().Int32Value()),
3138          Immediate(mirror::Class::kStatusInitialized));
3139  __ j(kLess, slow_path->GetEntryLabel());
3140  __ Bind(slow_path->GetExitLabel());
3141  // No need for memory fence, thanks to the X86 memory model.
3142}
3143
3144void LocationsBuilderX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
3145  LocationSummary* locations =
3146      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3147  locations->SetInAt(0, Location::RequiresRegister());
3148  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
3149}
3150
// Emits the load for a static field read, relative to the declaring class
// register. Sub-word types pick the extension matching Java semantics:
// boolean/char zero-extend, byte/short sign-extend.
void InstructionCodeGeneratorX86::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register cls = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      Register out = locations->Out().As<Register>();
      __ movzxb(out, Address(cls, offset));
      break;
    }

    case Primitive::kPrimByte: {
      Register out = locations->Out().As<Register>();
      __ movsxb(out, Address(cls, offset));
      break;
    }

    case Primitive::kPrimShort: {
      Register out = locations->Out().As<Register>();
      __ movsxw(out, Address(cls, offset));
      break;
    }

    case Primitive::kPrimChar: {
      Register out = locations->Out().As<Register>();
      __ movzxw(out, Address(cls, offset));
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register out = locations->Out().As<Register>();
      __ movl(out, Address(cls, offset));
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit loads are emitted as two 32-bit loads (not atomic).
      // TODO: support volatile.
      __ movl(locations->Out().AsRegisterPairLow<Register>(), Address(cls, offset));
      __ movl(locations->Out().AsRegisterPairHigh<Register>(), Address(cls, kX86WordSize + offset));
      break;
    }

    case Primitive::kPrimFloat: {
      XmmRegister out = locations->Out().As<XmmRegister>();
      __ movss(out, Address(cls, offset));
      break;
    }

    case Primitive::kPrimDouble: {
      XmmRegister out = locations->Out().As<XmmRegister>();
      __ movsd(out, Address(cls, offset));
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
3212
3213void LocationsBuilderX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
3214  LocationSummary* locations =
3215      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3216  locations->SetInAt(0, Location::RequiresRegister());
3217  Primitive::Type field_type = instruction->GetFieldType();
3218  bool needs_write_barrier =
3219      CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1));
3220  bool is_byte_type = (field_type == Primitive::kPrimBoolean)
3221      || (field_type == Primitive::kPrimByte);
3222  // The register allocator does not support multiple
3223  // inputs that die at entry with one in a specific register.
3224  if (is_byte_type) {
3225    // Ensure the value is in a byte register.
3226    locations->SetInAt(1, Location::RegisterLocation(EAX));
3227  } else {
3228    locations->SetInAt(1, Location::RequiresRegister());
3229  }
3230  // Temporary registers for the write barrier.
3231  if (needs_write_barrier) {
3232    locations->AddTemp(Location::RequiresRegister());
3233    // Ensure the card is in a byte register.
3234    locations->AddTemp(Location::RegisterLocation(ECX));
3235  }
3236}
3237
void InstructionCodeGeneratorX86::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  // Stores the value (input 1) into a static field of the class (input 0).
  // Reference stores are followed by a GC card mark (write barrier).
  LocationSummary* locations = instruction->GetLocations();
  Register cls = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
  Primitive::Type field_type = instruction->GetFieldType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      // movb requires a byte-addressable register; the locations builder
      // pinned the value to EAX for this case.
      ByteRegister value = locations->InAt(1).As<ByteRegister>();
      __ movb(Address(cls, offset), value);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      // 16-bit store; low 16 bits of the value register.
      Register value = locations->InAt(1).As<Register>();
      __ movw(Address(cls, offset), value);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register value = locations->InAt(1).As<Register>();
      __ movl(Address(cls, offset), value);

      // Mark the GC card after the store when a reference may be written.
      if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
        Register temp = locations->GetTemp(0).As<Register>();
        Register card = locations->GetTemp(1).As<Register>();
        codegen_->MarkGCCard(temp, card, cls, value);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // Two 32-bit stores (low then high); not atomic. NOTE(review): volatile
      // long stores presumably need extra handling, cf. the TODO in the
      // static field getter — confirm.
      Location value = locations->InAt(1);
      __ movl(Address(cls, offset), value.AsRegisterPairLow<Register>());
      __ movl(Address(cls, kX86WordSize + offset), value.AsRegisterPairHigh<Register>());
      break;
    }

    case Primitive::kPrimFloat: {
      XmmRegister value = locations->InAt(1).As<XmmRegister>();
      __ movss(Address(cls, offset), value);
      break;
    }

    case Primitive::kPrimDouble: {
      XmmRegister value = locations->InAt(1).As<XmmRegister>();
      __ movsd(Address(cls, offset), value);
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }
}
3296
3297void LocationsBuilderX86::VisitLoadString(HLoadString* load) {
3298  LocationSummary* locations =
3299      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
3300  locations->SetOut(Location::RequiresRegister());
3301}
3302
void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) {
  // Loads a java.lang.String from the current method's dex cache. A null
  // cache entry means the string is unresolved; branch to a slow path that
  // resolves it at runtime.
  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86(load);
  codegen_->AddSlowPath(slow_path);

  Register out = load->GetLocations()->Out().As<Register>();
  // out <- current ArtMethod.
  codegen_->LoadCurrentMethod(out);
  // out <- method->dex_cache_strings_.
  __ movl(out, Address(out, mirror::ArtMethod::DexCacheStringsOffset().Int32Value()));
  // out <- dex_cache_strings_[string_index].
  __ movl(out, Address(out, CodeGenerator::GetCacheOffset(load->GetStringIndex())));
  // Take the slow path if the cached entry is null.
  __ testl(out, out);
  __ j(kEqual, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
3315
3316void LocationsBuilderX86::VisitLoadException(HLoadException* load) {
3317  LocationSummary* locations =
3318      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
3319  locations->SetOut(Location::RequiresRegister());
3320}
3321
3322void InstructionCodeGeneratorX86::VisitLoadException(HLoadException* load) {
3323  Address address = Address::Absolute(Thread::ExceptionOffset<kX86WordSize>().Int32Value());
3324  __ fs()->movl(load->GetLocations()->Out().As<Register>(), address);
3325  __ fs()->movl(address, Immediate(0));
3326}
3327
3328void LocationsBuilderX86::VisitThrow(HThrow* instruction) {
3329  LocationSummary* locations =
3330      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3331  InvokeRuntimeCallingConvention calling_convention;
3332  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3333}
3334
void InstructionCodeGeneratorX86::VisitThrow(HThrow* instruction) {
  // The exception object is already in the first runtime-argument register
  // (arranged by the locations builder). Call pDeliverException through the
  // FS-based thread-local entrypoint table and record the dex pc for the
  // resulting stack map.
  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pDeliverException)));
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}
3339
3340void LocationsBuilderX86::VisitInstanceOf(HInstanceOf* instruction) {
3341  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
3342      ? LocationSummary::kNoCall
3343      : LocationSummary::kCallOnSlowPath;
3344  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
3345  locations->SetInAt(0, Location::RequiresRegister());
3346  locations->SetInAt(1, Location::Any());
3347  locations->SetOut(Location::RequiresRegister());
3348}
3349
void InstructionCodeGeneratorX86::VisitInstanceOf(HInstanceOf* instruction) {
  // Materializes `obj instanceof cls` as 0/1 in `out`. Final classes need
  // only an exact class-pointer compare; otherwise an inexact match defers
  // to a TypeCheckSlowPathX86 that computes the result at runtime.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  Location cls = locations->InAt(1);
  Register out = locations->Out().As<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Label done, zero;
  SlowPathCodeX86* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // TODO: avoid this check if we know obj is not null.
  __ testl(obj, obj);
  __ j(kEqual, &zero);
  // out <- obj->klass_.
  __ movl(out, Address(obj, class_offset));
  // Compare the class of `obj` with `cls`.
  if (cls.IsRegister()) {
    __ cmpl(out, cls.As<Register>());
  } else {
    DCHECK(cls.IsStackSlot()) << cls;
    __ cmpl(out, Address(ESP, cls.GetStackIndex()));
  }

  if (instruction->IsClassFinal()) {
    // Classes must be equal for the instanceof to succeed.
    __ j(kNotEqual, &zero);
    __ movl(out, Immediate(1));
    __ jmp(&done);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86(
        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
    codegen_->AddSlowPath(slow_path);
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ movl(out, Immediate(1));
    __ jmp(&done);
  }
  __ Bind(&zero);
  __ movl(out, Immediate(0));
  // The slow path exit is bound after the zero store, presumably so a
  // slow-path-produced result is not overwritten on return.
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
  __ Bind(&done);
}
3394
3395void LocationsBuilderX86::VisitCheckCast(HCheckCast* instruction) {
3396  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
3397      instruction, LocationSummary::kCallOnSlowPath);
3398  locations->SetInAt(0, Location::RequiresRegister());
3399  locations->SetInAt(1, Location::Any());
3400  locations->AddTemp(Location::RequiresRegister());
3401}
3402
void InstructionCodeGeneratorX86::VisitCheckCast(HCheckCast* instruction) {
  // Checks that `obj` (input 0) can be cast to `cls` (input 1); on an exact
  // class mismatch, defers to TypeCheckSlowPathX86. A null `obj` always
  // passes the check, hence the direct jump to the slow path's *exit* label.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  Location cls = locations->InAt(1);
  Register temp = locations->GetTemp(0).As<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  SlowPathCodeX86* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathX86(
      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  // TODO: avoid this check if we know obj is not null.
  __ testl(obj, obj);
  __ j(kEqual, slow_path->GetExitLabel());
  // temp <- obj->klass_.
  __ movl(temp, Address(obj, class_offset));

  // Compare the class of `obj` with `cls`.
  if (cls.IsRegister()) {
    __ cmpl(temp, cls.As<Register>());
  } else {
    DCHECK(cls.IsStackSlot()) << cls;
    __ cmpl(temp, Address(ESP, cls.GetStackIndex()));
  }

  // Not an exact match: let the slow path decide (and throw if needed).
  __ j(kNotEqual, slow_path->GetEntryLabel());
  __ Bind(slow_path->GetExitLabel());
}
3429
3430void LocationsBuilderX86::VisitMonitorOperation(HMonitorOperation* instruction) {
3431  LocationSummary* locations =
3432      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
3433  InvokeRuntimeCallingConvention calling_convention;
3434  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
3435}
3436
3437void InstructionCodeGeneratorX86::VisitMonitorOperation(HMonitorOperation* instruction) {
3438  __ fs()->call(Address::Absolute(instruction->IsEnter()
3439        ? QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pLockObject)
3440        : QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pUnlockObject)));
3441  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
3442}
3443
3444void LocationsBuilderX86::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
3445void LocationsBuilderX86::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
3446void LocationsBuilderX86::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }
3447
3448void LocationsBuilderX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
3449  LocationSummary* locations =
3450      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
3451  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
3452         || instruction->GetResultType() == Primitive::kPrimLong);
3453  locations->SetInAt(0, Location::RequiresRegister());
3454  locations->SetInAt(1, Location::Any());
3455  locations->SetOut(Location::SameAsFirstInput());
3456}
3457
3458void InstructionCodeGeneratorX86::VisitAnd(HAnd* instruction) {
3459  HandleBitwiseOperation(instruction);
3460}
3461
3462void InstructionCodeGeneratorX86::VisitOr(HOr* instruction) {
3463  HandleBitwiseOperation(instruction);
3464}
3465
3466void InstructionCodeGeneratorX86::VisitXor(HXor* instruction) {
3467  HandleBitwiseOperation(instruction);
3468}
3469
3470void InstructionCodeGeneratorX86::HandleBitwiseOperation(HBinaryOperation* instruction) {
3471  LocationSummary* locations = instruction->GetLocations();
3472  Location first = locations->InAt(0);
3473  Location second = locations->InAt(1);
3474  DCHECK(first.Equals(locations->Out()));
3475
3476  if (instruction->GetResultType() == Primitive::kPrimInt) {
3477    if (second.IsRegister()) {
3478      if (instruction->IsAnd()) {
3479        __ andl(first.As<Register>(), second.As<Register>());
3480      } else if (instruction->IsOr()) {
3481        __ orl(first.As<Register>(), second.As<Register>());
3482      } else {
3483        DCHECK(instruction->IsXor());
3484        __ xorl(first.As<Register>(), second.As<Register>());
3485      }
3486    } else if (second.IsConstant()) {
3487      if (instruction->IsAnd()) {
3488        __ andl(first.As<Register>(), Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
3489      } else if (instruction->IsOr()) {
3490        __ orl(first.As<Register>(), Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
3491      } else {
3492        DCHECK(instruction->IsXor());
3493        __ xorl(first.As<Register>(), Immediate(second.GetConstant()->AsIntConstant()->GetValue()));
3494      }
3495    } else {
3496      if (instruction->IsAnd()) {
3497        __ andl(first.As<Register>(), Address(ESP, second.GetStackIndex()));
3498      } else if (instruction->IsOr()) {
3499        __ orl(first.As<Register>(), Address(ESP, second.GetStackIndex()));
3500      } else {
3501        DCHECK(instruction->IsXor());
3502        __ xorl(first.As<Register>(), Address(ESP, second.GetStackIndex()));
3503      }
3504    }
3505  } else {
3506    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
3507    if (second.IsRegisterPair()) {
3508      if (instruction->IsAnd()) {
3509        __ andl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
3510        __ andl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
3511      } else if (instruction->IsOr()) {
3512        __ orl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
3513        __ orl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
3514      } else {
3515        DCHECK(instruction->IsXor());
3516        __ xorl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
3517        __ xorl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
3518      }
3519    } else {
3520      if (instruction->IsAnd()) {
3521        __ andl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
3522        __ andl(first.AsRegisterPairHigh<Register>(),
3523                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
3524      } else if (instruction->IsOr()) {
3525        __ orl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
3526        __ orl(first.AsRegisterPairHigh<Register>(),
3527                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
3528      } else {
3529        DCHECK(instruction->IsXor());
3530        __ xorl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
3531        __ xorl(first.AsRegisterPairHigh<Register>(),
3532                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
3533      }
3534    }
3535  }
3536}
3537
3538}  // namespace x86
3539}  // namespace art
3540