code_generator_x86.cc revision 56b9ee6fe1d6880c5fca0e7feb28b25a1ded2e2f
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86.h"
18
19#include "entrypoints/quick/quick_entrypoints.h"
20#include "gc/accounting/card_table.h"
21#include "mirror/array-inl.h"
22#include "mirror/art_method.h"
23#include "mirror/class.h"
24#include "thread.h"
25#include "utils/assembler.h"
26#include "utils/stack_checks.h"
27#include "utils/x86/assembler_x86.h"
28#include "utils/x86/managed_register_x86.h"
29
30namespace art {
31
32namespace x86 {
33
34static constexpr bool kExplicitStackOverflowCheck = false;
35
36static constexpr int kNumberOfPushedRegistersAtEntry = 1;
37static constexpr int kCurrentMethodStackOffset = 0;
38
39static constexpr Register kRuntimeParameterCoreRegisters[] = { EAX, ECX, EDX };
40static constexpr size_t kRuntimeParameterCoreRegistersLength =
41    arraysize(kRuntimeParameterCoreRegisters);
42static constexpr XmmRegister kRuntimeParameterFpuRegisters[] = { };
43static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;
44
// Calling convention used when calling quick runtime entrypoints from
// compiled code: core arguments in EAX, ECX, EDX and no FPU argument
// registers (see kRuntimeParameter* arrays above).
class InvokeRuntimeCallingConvention : public CallingConvention<Register, XmmRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
56
57#define __ reinterpret_cast<X86Assembler*>(codegen->GetAssembler())->
58
// Slow path for HNullCheck: calls the pThrowNullPointer quick entrypoint
// (through the fs:-relative thread pointer) and records the dex PC so the
// runtime can map the fault back to the bytecode. The entrypoint throws,
// so control never returns to the fast path.
class NullCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86(HNullCheck* instruction) : instruction_(instruction) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowNullPointer)));
    // Record the call site so stack walking finds the correct dex PC.
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86);
};
73
// Slow path for the explicit stack overflow check in GenerateFrameEntry:
// pops this method's frame (leaving only the pushed return PC) and
// tail-jumps to pThrowStackOverflow so the exception unwinds from the
// caller's context.
class StackOverflowCheckSlowPathX86 : public SlowPathCode {
 public:
  StackOverflowCheckSlowPathX86() {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    // Undo the frame allocation performed by GenerateFrameEntry.
    __ addl(ESP,
            Immediate(codegen->GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
    __ fs()->jmp(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowStackOverflow)));
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathX86);
};
88
// Slow path for HBoundsCheck: moves the failing index and the array length
// into the first two runtime-calling-convention registers, then calls
// pThrowArrayBounds (which throws, so there is no return jump).
class BoundsCheckSlowPathX86 : public SlowPathCode {
 public:
  BoundsCheckSlowPathX86(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction), index_location_(index_location), length_location_(length_location) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = reinterpret_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    InvokeRuntimeCallingConvention calling_convention;
    // NOTE(review): these two moves are emitted sequentially; if
    // index_location_ happened to be allocated in GetRegisterAt(1) the first
    // move would clobber it before the second reads length_location_.
    // Verify the register allocator cannot produce that assignment here.
    x86_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), index_location_);
    x86_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(1)), length_location_);
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowArrayBounds)));
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86);
};
113
// Slow path for HSuspendCheck: saves live registers, calls pTestSuspend so
// the thread can service a pending suspend request, restores registers and
// resumes either at the return label (null successor, i.e. re-enter the
// instruction stream in place) or at the given successor block.
class SuspendCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit SuspendCheckSlowPathX86(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    // The runtime call can clobber any register, so spill live values first.
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pTestSuspend)));
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(codegen->GetLabelOf(successor_));
    }
  }

  // Only valid when this slow path resumes in place (successor_ == nullptr).
  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86);
};
144
145#undef __
146#define __ reinterpret_cast<X86Assembler*>(GetAssembler())->
147
148inline Condition X86Condition(IfCondition cond) {
149  switch (cond) {
150    case kCondEQ: return kEqual;
151    case kCondNE: return kNotEqual;
152    case kCondLT: return kLess;
153    case kCondLE: return kLessEqual;
154    case kCondGT: return kGreater;
155    case kCondGE: return kGreaterEqual;
156    default:
157      LOG(FATAL) << "Unknown if condition";
158  }
159  return kEqual;
160}
161
// Pretty-prints a core (CPU) register id for debug output.
void CodeGeneratorX86::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << X86ManagedRegister::FromCpuRegister(Register(reg));
}
165
// Pretty-prints an XMM register id for debug output.
void CodeGeneratorX86::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << X86ManagedRegister::FromXmmRegister(XmmRegister(reg));
}
169
// Spills core register `reg_id` into the given stack slot.
void CodeGeneratorX86::SaveCoreRegister(Location stack_location, uint32_t reg_id) {
  __ movl(Address(ESP, stack_location.GetStackIndex()), static_cast<Register>(reg_id));
}
173
// Reloads core register `reg_id` from the given stack slot.
void CodeGeneratorX86::RestoreCoreRegister(Location stack_location, uint32_t reg_id) {
  __ movl(static_cast<Register>(reg_id), Address(ESP, stack_location.GetStackIndex()));
}
177
// Builds the x86 code generator and its helper visitors (locations builder,
// instruction code generator and parallel-move resolver) for `graph`.
CodeGeneratorX86::CodeGeneratorX86(HGraph* graph)
    : CodeGenerator(graph, kNumberOfRegIds),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this) {}
183
// Bytes pushed on entry before the frame is allocated (the fake return
// register only; see GenerateFrameEntry).
size_t CodeGeneratorX86::FrameEntrySpillSize() const {
  return kNumberOfPushedRegistersAtEntry * kX86WordSize;
}
187
// The register-pair section of the blocked-register array lives after the
// kNumberOfAllocIds single-register entries.
static bool* GetBlockedRegisterPairs(bool* blocked_registers) {
  return blocked_registers + kNumberOfAllocIds;
}
191
// The XMM section of the blocked-register array follows the CPU registers.
static bool* GetBlockedXmmRegisters(bool* blocked_registers) {
  return blocked_registers + kNumberOfCpuRegisters;
}
195
196Location CodeGeneratorX86::AllocateFreeRegister(Primitive::Type type, bool* blocked_registers) const {
197  switch (type) {
198    case Primitive::kPrimLong: {
199      bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);
200      size_t reg = AllocateFreeRegisterInternal(blocked_register_pairs, kNumberOfRegisterPairs);
201      X86ManagedRegister pair =
202          X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
203      blocked_registers[pair.AsRegisterPairLow()] = true;
204      blocked_registers[pair.AsRegisterPairHigh()] = true;
205      // Block all other register pairs that share a register with `pair`.
206      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
207        X86ManagedRegister current =
208            X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
209        if (current.AsRegisterPairLow() == pair.AsRegisterPairLow()
210            || current.AsRegisterPairLow() == pair.AsRegisterPairHigh()
211            || current.AsRegisterPairHigh() == pair.AsRegisterPairLow()
212            || current.AsRegisterPairHigh() == pair.AsRegisterPairHigh()) {
213          blocked_register_pairs[i] = true;
214        }
215      }
216      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
217    }
218
219    case Primitive::kPrimByte:
220    case Primitive::kPrimBoolean:
221    case Primitive::kPrimChar:
222    case Primitive::kPrimShort:
223    case Primitive::kPrimInt:
224    case Primitive::kPrimNot: {
225      Register reg = static_cast<Register>(
226          AllocateFreeRegisterInternal(blocked_registers, kNumberOfCpuRegisters));
227      // Block all register pairs that contain `reg`.
228      bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);
229      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
230        X86ManagedRegister current =
231            X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
232        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
233          blocked_register_pairs[i] = true;
234        }
235      }
236      return Location::RegisterLocation(reg);
237    }
238
239    case Primitive::kPrimFloat:
240    case Primitive::kPrimDouble: {
241      return Location::FpuRegisterLocation(AllocateFreeRegisterInternal(
242          GetBlockedXmmRegisters(blocked_registers), kNumberOfXmmRegisters));
243    }
244
245    case Primitive::kPrimVoid:
246      LOG(FATAL) << "Unreachable type " << type;
247  }
248
249  return Location();
250}
251
// Marks the registers the allocator must never hand out: ESP, the (currently
// unused) callee-saved registers, and every pair touching them.
void CodeGeneratorX86::SetupBlockedRegisters(bool* blocked_registers) const {
  bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);

  // Don't allocate the dalvik style register pair passing.
  blocked_register_pairs[ECX_EDX] = true;

  // Stack register is always reserved.
  blocked_registers[ESP] = true;

  // TODO: We currently don't use Quick's callee saved registers.
  blocked_registers[EBP] = true;
  blocked_registers[ESI] = true;
  blocked_registers[EDI] = true;
  // Block the pairs whose high half is the blocked EDI.
  blocked_register_pairs[EAX_EDI] = true;
  blocked_register_pairs[EDX_EDI] = true;
  blocked_register_pairs[ECX_EDI] = true;
  blocked_register_pairs[EBX_EDI] = true;
}
270
// Total number of allocatable register ids (core + pairs + XMM).
size_t CodeGeneratorX86::GetNumberOfRegisters() const {
  return kNumberOfRegIds;
}
274
// Visitor that emits native code for each HIR instruction, sharing the
// assembler owned by `codegen`.
InstructionCodeGeneratorX86::InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
279
// Emits the method prologue: optional stack overflow check, frame
// allocation, and storing the current ArtMethod (passed in EAX) at the
// bottom of the frame.
void CodeGeneratorX86::GenerateFrameEntry() {
  // Create a fake register to mimic Quick.
  static const int kFakeReturnRegister = 8;
  core_spill_mask_ |= (1 << kFakeReturnRegister);

  bool skip_overflow_check = IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86);
  if (!skip_overflow_check && !kExplicitStackOverflowCheck) {
    // Implicit check: touch the page below the reserved area so a stack
    // overflow faults here, where the recorded PC maps to dex PC 0.
    __ testl(EAX, Address(ESP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86))));
    RecordPcInfo(nullptr, 0);
  }

  // The return PC has already been pushed on the stack.
  __ subl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));

  if (!skip_overflow_check && kExplicitStackOverflowCheck) {
    // Explicit check: compare ESP against the thread's stack end and branch
    // to the slow path that throws StackOverflowError.
    SlowPathCode* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathX86();
    AddSlowPath(slow_path);

    __ fs()->cmpl(ESP, Address::Absolute(Thread::StackEndOffset<kX86WordSize>()));
    __ j(kLess, slow_path->GetEntryLabel());
  }

  // Store the current method (still in EAX) at the bottom of the frame.
  __ movl(Address(ESP, kCurrentMethodStackOffset), EAX);
}
304
// Emits the method epilogue: releases the frame (the return PC pop is done
// by the `ret` at each return site).
void CodeGeneratorX86::GenerateFrameExit() {
  __ addl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
}
308
// Binds `label` to the current assembler position.
void CodeGeneratorX86::Bind(Label* label) {
  __ Bind(label);
}
312
// Loads the current ArtMethod (stored at frame bottom by the prologue)
// into `reg`.
void InstructionCodeGeneratorX86::LoadCurrentMethod(Register reg) {
  __ movl(reg, Address(ESP, kCurrentMethodStackOffset));
}
316
317Location CodeGeneratorX86::GetStackLocation(HLoadLocal* load) const {
318  switch (load->GetType()) {
319    case Primitive::kPrimLong:
320    case Primitive::kPrimDouble:
321      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
322      break;
323
324    case Primitive::kPrimInt:
325    case Primitive::kPrimNot:
326    case Primitive::kPrimFloat:
327      return Location::StackSlot(GetStackSlot(load->GetLocal()));
328
329    case Primitive::kPrimBoolean:
330    case Primitive::kPrimByte:
331    case Primitive::kPrimChar:
332    case Primitive::kPrimShort:
333    case Primitive::kPrimVoid:
334      LOG(FATAL) << "Unexpected type " << load->GetType();
335  }
336
337  LOG(FATAL) << "Unreachable";
338  return Location();
339}
340
// Computes the location of the next method argument of the given type under
// the dex calling convention: 32-bit values take one GP register or a stack
// slot; 64-bit values take a register pair, a register/stack split
// ("quick parameter") when only one register remains, or a double stack slot.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble: {
      // 64-bit values consume two consecutive GP argument indices.
      uint32_t index = gp_index_;
      gp_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        X86ManagedRegister pair = X86ManagedRegister::FromRegisterPair(
            calling_convention.GetRegisterPairAt(index));
        return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
        // Low half in the last register, high half on the stack.
        return Location::QuickParameter(index);
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
379
// Emits a 32-bit move between any combination of core register, XMM
// register and stack slot. No-op when source and destination coincide.
void CodeGeneratorX86::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movl(destination.As<Register>(), source.As<Register>());
    } else if (source.IsFpuRegister()) {
      __ movd(destination.As<Register>(), source.As<XmmRegister>());
    } else {
      DCHECK(source.IsStackSlot());
      __ movl(destination.As<Register>(), Address(ESP, source.GetStackIndex()));
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ movd(destination.As<XmmRegister>(), source.As<Register>());
    } else if (source.IsFpuRegister()) {
      __ movaps(destination.As<XmmRegister>(), source.As<XmmRegister>());
    } else {
      DCHECK(source.IsStackSlot());
      __ movss(destination.As<XmmRegister>(), Address(ESP, source.GetStackIndex()));
    }
  } else {
    DCHECK(destination.IsStackSlot());
    if (source.IsRegister()) {
      __ movl(Address(ESP, destination.GetStackIndex()), source.As<Register>());
    } else if (source.IsFpuRegister()) {
      __ movss(Address(ESP, destination.GetStackIndex()), source.As<XmmRegister>());
    } else {
      DCHECK(source.IsStackSlot());
      // Stack-to-stack move without clobbering a register: push/pop pair.
      __ pushl(Address(ESP, source.GetStackIndex()));
      __ popl(Address(ESP, destination.GetStackIndex()));
    }
  }
}
415
416void CodeGeneratorX86::Move64(Location destination, Location source) {
417  if (source.Equals(destination)) {
418    return;
419  }
420  if (destination.IsRegisterPair()) {
421    if (source.IsRegisterPair()) {
422      __ movl(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
423      __ movl(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
424    } else if (source.IsFpuRegister()) {
425      LOG(FATAL) << "Unimplemented";
426    } else if (source.IsQuickParameter()) {
427      uint32_t argument_index = source.GetQuickParameterIndex();
428      InvokeDexCallingConvention calling_convention;
429      __ movl(destination.AsRegisterPairLow<Register>(),
430              calling_convention.GetRegisterAt(argument_index));
431      __ movl(destination.AsRegisterPairHigh<Register>(), Address(ESP,
432          calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize()));
433    } else {
434      DCHECK(source.IsDoubleStackSlot());
435      __ movl(destination.AsRegisterPairLow<Register>(), Address(ESP, source.GetStackIndex()));
436      __ movl(destination.AsRegisterPairHigh<Register>(),
437              Address(ESP, source.GetHighStackIndex(kX86WordSize)));
438    }
439  } else if (destination.IsQuickParameter()) {
440    InvokeDexCallingConvention calling_convention;
441    uint32_t argument_index = destination.GetQuickParameterIndex();
442    if (source.IsRegister()) {
443      __ movl(calling_convention.GetRegisterAt(argument_index), source.AsRegisterPairLow<Register>());
444      __ movl(Address(ESP, calling_convention.GetStackOffsetOf(argument_index + 1)),
445              source.AsRegisterPairHigh<Register>());
446    } else if (source.IsFpuRegister()) {
447      LOG(FATAL) << "Unimplemented";
448    } else {
449      DCHECK(source.IsDoubleStackSlot());
450      __ movl(calling_convention.GetRegisterAt(argument_index),
451              Address(ESP, source.GetStackIndex()));
452      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
453      __ popl(Address(ESP, calling_convention.GetStackOffsetOf(argument_index + 1)));
454    }
455  } else if (destination.IsFpuRegister()) {
456    if (source.IsDoubleStackSlot()) {
457      __ movsd(destination.As<XmmRegister>(), Address(ESP, source.GetStackIndex()));
458    } else {
459      LOG(FATAL) << "Unimplemented";
460    }
461  } else {
462    DCHECK(destination.IsDoubleStackSlot());
463    if (source.IsRegisterPair()) {
464      __ movl(Address(ESP, destination.GetStackIndex()), source.AsRegisterPairLow<Register>());
465      __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
466              source.AsRegisterPairHigh<Register>());
467    } else if (source.IsQuickParameter()) {
468      InvokeDexCallingConvention calling_convention;
469      uint32_t argument_index = source.GetQuickParameterIndex();
470      __ movl(Address(ESP, destination.GetStackIndex()),
471              calling_convention.GetRegisterAt(argument_index));
472      DCHECK_EQ(calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize(),
473                static_cast<size_t>(destination.GetHighStackIndex(kX86WordSize)));
474    } else if (source.IsFpuRegister()) {
475      __ movsd(Address(ESP, destination.GetStackIndex()), source.As<XmmRegister>());
476    } else {
477      DCHECK(source.IsDoubleStackSlot());
478      __ pushl(Address(ESP, source.GetStackIndex()));
479      __ popl(Address(ESP, destination.GetStackIndex()));
480      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
481      __ popl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)));
482    }
483  }
484}
485
486void CodeGeneratorX86::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
487  if (instruction->AsIntConstant() != nullptr) {
488    Immediate imm(instruction->AsIntConstant()->GetValue());
489    if (location.IsRegister()) {
490      __ movl(location.As<Register>(), imm);
491    } else {
492      __ movl(Address(ESP, location.GetStackIndex()), imm);
493    }
494  } else if (instruction->AsLongConstant() != nullptr) {
495    int64_t value = instruction->AsLongConstant()->GetValue();
496    if (location.IsRegister()) {
497      __ movl(location.AsRegisterPairLow<Register>(), Immediate(Low32Bits(value)));
498      __ movl(location.AsRegisterPairHigh<Register>(), Immediate(High32Bits(value)));
499    } else {
500      __ movl(Address(ESP, location.GetStackIndex()), Immediate(Low32Bits(value)));
501      __ movl(Address(ESP, location.GetHighStackIndex(kX86WordSize)), Immediate(High32Bits(value)));
502    }
503  } else if (instruction->AsLoadLocal() != nullptr) {
504    int slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
505    switch (instruction->GetType()) {
506      case Primitive::kPrimBoolean:
507      case Primitive::kPrimByte:
508      case Primitive::kPrimChar:
509      case Primitive::kPrimShort:
510      case Primitive::kPrimInt:
511      case Primitive::kPrimNot:
512      case Primitive::kPrimFloat:
513        Move32(location, Location::StackSlot(slot));
514        break;
515
516      case Primitive::kPrimLong:
517      case Primitive::kPrimDouble:
518        Move64(location, Location::DoubleStackSlot(slot));
519        break;
520
521      default:
522        LOG(FATAL) << "Unimplemented local type " << instruction->GetType();
523    }
524  } else {
525    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
526    switch (instruction->GetType()) {
527      case Primitive::kPrimBoolean:
528      case Primitive::kPrimByte:
529      case Primitive::kPrimChar:
530      case Primitive::kPrimShort:
531      case Primitive::kPrimInt:
532      case Primitive::kPrimNot:
533      case Primitive::kPrimFloat:
534        Move32(location, instruction->GetLocations()->Out());
535        break;
536
537      case Primitive::kPrimLong:
538      case Primitive::kPrimDouble:
539        Move64(location, instruction->GetLocations()->Out());
540        break;
541
542      default:
543        LOG(FATAL) << "Unexpected type " << instruction->GetType();
544    }
545  }
546}
547
// HGoto needs no operands, hence no location summary.
void LocationsBuilderX86::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
551
// Emits an unconditional branch, folding in any suspend check attached to
// a loop back edge (or trailing the entry block) and eliding the jump when
// the successor is the fall-through block.
void InstructionCodeGeneratorX86::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
    // Back edge: emit the loop's suspend check, which also performs the jump.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
573
// HExit needs no operands, hence no location summary.
void LocationsBuilderX86::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
577
// The exit block is never executed; in debug builds plant a breakpoint to
// catch control flow that reaches it anyway.
void InstructionCodeGeneratorX86::VisitExit(HExit* exit) {
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ int3();
  }
}
584
// An HIf only needs an input location when its condition is materialized
// (or is not an HCondition); otherwise the compare feeding it sets eflags.
void LocationsBuilderX86::VisitIf(HIf* if_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
  HInstruction* cond = if_instr->InputAt(0);
  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::Any(), Location::kDiesAtEntry);
  }
}
593
594void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
595  HInstruction* cond = if_instr->InputAt(0);
596  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
597    // Moves do not affect the eflags register, so if the condition is evaluated
598    // just before the if, we don't need to evaluate it again.
599    if (!cond->IsCondition() || !cond->AsCondition()->IsBeforeWhenDisregardMoves(if_instr)) {
600      // Materialized condition, compare against 0.
601      Location lhs = if_instr->GetLocations()->InAt(0);
602      if (lhs.IsRegister()) {
603        __ cmpl(lhs.As<Register>(), Immediate(0));
604      } else {
605        __ cmpl(Address(ESP, lhs.GetStackIndex()), Immediate(0));
606      }
607    }
608    __ j(kNotEqual,  codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
609  } else {
610    Location lhs = cond->GetLocations()->InAt(0);
611    Location rhs = cond->GetLocations()->InAt(1);
612    // LHS is guaranteed to be in a register (see LocationsBuilderX86::VisitCondition).
613    if (rhs.IsRegister()) {
614      __ cmpl(lhs.As<Register>(), rhs.As<Register>());
615    } else if (rhs.IsConstant()) {
616      HIntConstant* instruction = rhs.GetConstant()->AsIntConstant();
617      Immediate imm(instruction->AsIntConstant()->GetValue());
618      __ cmpl(lhs.As<Register>(), imm);
619    } else {
620      __ cmpl(lhs.As<Register>(), Address(ESP, rhs.GetStackIndex()));
621    }
622    __ j(X86Condition(cond->AsCondition()->GetCondition()),
623         codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
624  }
625  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
626    __ jmp(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
627  }
628}
629
// HLocal needs no operands, hence no location summary.
void LocationsBuilderX86::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}
633
// Locals generate no code; just assert they live in the entry block.
void InstructionCodeGeneratorX86::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}
637
// HLoadLocal needs no operands, hence no location summary.
void LocationsBuilderX86::VisitLoadLocal(HLoadLocal* local) {
  local->SetLocations(nullptr);
}
641
void InstructionCodeGeneratorX86::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
}
645
// Pins the stored value's input location to the local's own stack slot
// (single slot for 32-bit types, double slot for 64-bit ones), so the
// register allocator performs the store via its input move.
void LocationsBuilderX86::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unknown local type " << store->InputAt(1)->GetType();
  }
  store->SetLocations(locations);
}
670
void InstructionCodeGeneratorX86::VisitStoreLocal(HStoreLocal* store) {
  // Nothing to do: the input is constrained to the local's stack slot in
  // LocationsBuilderX86::VisitStoreLocal, so the store is an input move.
}
673
// Conditions require their LHS in a register (cmpl needs a register
// operand) and produce a register output only when materialized.
void LocationsBuilderX86::VisitCondition(HCondition* comp) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister(), Location::kDiesAtEntry);
  locations->SetInAt(1, Location::Any(), Location::kDiesAtEntry);
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister());
  }
}
683
// Materializes a condition into a 0/1 register value via cmpl + setcc.
// Non-materialized conditions emit nothing here; the consuming HIf emits
// the compare itself.
void InstructionCodeGeneratorX86::VisitCondition(HCondition* comp) {
  if (comp->NeedsMaterialization()) {
    LocationSummary* locations = comp->GetLocations();
    Register reg = locations->Out().As<Register>();
    // Clear register: setcc only sets the low byte.
    __ xorl(reg, reg);
    if (locations->InAt(1).IsRegister()) {
      __ cmpl(locations->InAt(0).As<Register>(),
              locations->InAt(1).As<Register>());
    } else if (locations->InAt(1).IsConstant()) {
      HConstant* instruction = locations->InAt(1).GetConstant();
      Immediate imm(instruction->AsIntConstant()->GetValue());
      __ cmpl(locations->InAt(0).As<Register>(), imm);
    } else {
      __ cmpl(locations->InAt(0).As<Register>(),
              Address(ESP, locations->InAt(1).GetStackIndex()));
    }
    // Note: the xorl above must precede the cmpl, since it clobbers eflags.
    __ setb(X86Condition(comp->GetCondition()), reg);
  }
}
704
// Delegates to the shared HCondition handling.
void LocationsBuilderX86::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
708
// Delegates to the shared HCondition handling.
void InstructionCodeGeneratorX86::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}
712
// Delegates to the shared HCondition handling.
void LocationsBuilderX86::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
716
// Delegates to the shared HCondition handling.
void InstructionCodeGeneratorX86::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}
720
// Delegates to the shared HCondition handling.
void LocationsBuilderX86::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
724
// Delegates to the shared HCondition handling.
void InstructionCodeGeneratorX86::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}
728
// Delegates to the shared HCondition handling.
void LocationsBuilderX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
732
// Delegates to the shared HCondition handling.
void InstructionCodeGeneratorX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}
736
// Delegates to the shared HCondition handling.
void LocationsBuilderX86::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
740
// Delegates to the shared HCondition handling.
void InstructionCodeGeneratorX86::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}
744
// Delegates to the shared HCondition handling.
void LocationsBuilderX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
748
// Delegates to the shared HCondition handling.
void InstructionCodeGeneratorX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
752
// Constants produce a constant location; no register is reserved.
void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
758
void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
}
761
// Constants produce a constant location; no register is reserved.
void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}
767
void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
}
771
// HReturnVoid needs no operands, hence no location summary.
void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}
775
// Tears down the frame and returns to the caller.
void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret) {
  codegen_->GenerateFrameExit();
  __ ret();
}
780
// Pins the return value to the ABI return location: EAX for 32-bit core
// values, EAX:EDX for longs, XMM0 for floats/doubles.
void LocationsBuilderX86::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(0, Location::RegisterLocation(EAX));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(
          0, Location::RegisterPairLocation(EAX, EDX));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(
          0, Location::FpuRegisterLocation(XMM0));
      break;

    default:
      LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
  }
}
809
// Emits the return: the value is already in its ABI location (enforced by
// the locations builder and checked here in debug builds), so only the
// frame teardown and `ret` are needed.
void InstructionCodeGeneratorX86::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        DCHECK_EQ(ret->GetLocations()->InAt(0).As<Register>(), EAX);
        break;

      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairLow<Register>(), EAX);
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairHigh<Register>(), EDX);
        break;

      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        DCHECK_EQ(ret->GetLocations()->InAt(0).As<XmmRegister>(), XMM0);
        break;

      default:
        LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
  __ ret();
}
839
void LocationsBuilderX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  // Argument and return locations follow the shared invoke handling.
  HandleInvoke(invoke);
}
843
void InstructionCodeGeneratorX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  // Resolve the callee through the current method's dex cache, then make an
  // indirect call through the callee's quick-compiled entry point.
  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
  uint32_t heap_reference_size = sizeof(mirror::HeapReference<mirror::Object>);
  // Byte offset of the resolved method inside the dex cache array.
  size_t index_in_cache = mirror::Array::DataOffset(heap_reference_size).Int32Value() +
      invoke->GetIndexInDexCache() * kX86WordSize;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ movl(temp, Address(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()));
  // temp = temp[index_in_cache]
  __ movl(temp, Address(temp, index_in_cache));
  // (temp + offset_of_quick_compiled_code)()
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value()));

  DCHECK(!codegen_->IsLeafMethod());
  // Map the call site back to its dex pc for stack walking / deopt info.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
869
void LocationsBuilderX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  // Argument and return locations follow the shared invoke handling.
  HandleInvoke(invoke);
}
873
874void LocationsBuilderX86::HandleInvoke(HInvoke* invoke) {
875  LocationSummary* locations =
876      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
877  locations->AddTemp(Location::RegisterLocation(EAX));
878
879  InvokeDexCallingConventionVisitor calling_convention_visitor;
880  for (size_t i = 0; i < invoke->InputCount(); i++) {
881    HInstruction* input = invoke->InputAt(i);
882    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
883  }
884
885  switch (invoke->GetType()) {
886    case Primitive::kPrimBoolean:
887    case Primitive::kPrimByte:
888    case Primitive::kPrimChar:
889    case Primitive::kPrimShort:
890    case Primitive::kPrimInt:
891    case Primitive::kPrimNot:
892      locations->SetOut(Location::RegisterLocation(EAX));
893      break;
894
895    case Primitive::kPrimLong:
896      locations->SetOut(Location::RegisterPairLocation(EAX, EDX));
897      break;
898
899    case Primitive::kPrimVoid:
900      break;
901
902    case Primitive::kPrimDouble:
903    case Primitive::kPrimFloat:
904      locations->SetOut(Location::FpuRegisterLocation(XMM0));
905      break;
906  }
907
908  invoke->SetLocations(locations);
909}
910
void InstructionCodeGeneratorX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  // Dispatch through the receiver's embedded vtable, then call the resolved
  // method's quick-compiled entry point.
  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    // Receiver is on the stack: load it first, then its class.
    __ movl(temp, Address(ESP, receiver.GetStackIndex()));
    __ movl(temp, Address(temp, class_offset));
  } else {
    __ movl(temp, Address(receiver.As<Register>(), class_offset));
  }
  // temp = temp->GetMethodAt(method_offset);
  __ movl(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value()));

  DCHECK(!codegen_->IsLeafMethod());
  // Map the call site back to its dex pc for stack walking / deopt info.
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
933
934void LocationsBuilderX86::VisitAdd(HAdd* add) {
935  LocationSummary* locations =
936      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
937  switch (add->GetResultType()) {
938    case Primitive::kPrimInt:
939    case Primitive::kPrimLong: {
940      locations->SetInAt(0, Location::RequiresRegister());
941      locations->SetInAt(1, Location::Any());
942      locations->SetOut(Location::SameAsFirstInput());
943      break;
944    }
945
946    case Primitive::kPrimFloat:
947    case Primitive::kPrimDouble: {
948      locations->SetInAt(0, Location::RequiresFpuRegister());
949      locations->SetInAt(1, Location::Any());
950      locations->SetOut(Location::SameAsFirstInput());
951      break;
952    }
953
954    default:
955      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
956      break;
957  }
958}
959
void InstructionCodeGeneratorX86::VisitAdd(HAdd* add) {
  // Emit the add in place: the output shares the first input's register(s),
  // and the second operand may be a register, a constant, or a stack slot.
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      DCHECK_EQ(first.As<Register>(), locations->Out().As<Register>());
      if (second.IsRegister()) {
        __ addl(first.As<Register>(), second.As<Register>());
      } else if (second.IsConstant()) {
        HConstant* instruction = second.GetConstant();
        Immediate imm(instruction->AsIntConstant()->GetValue());
        __ addl(first.As<Register>(), imm);
      } else {
        __ addl(first.As<Register>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      DCHECK_EQ(first.AsRegisterPairLow<Register>(),
                locations->Out().AsRegisterPairLow<Register>());
      DCHECK_EQ(first.AsRegisterPairHigh<Register>(),
                locations->Out().AsRegisterPairHigh<Register>());
      // 64-bit add as add-low / add-with-carry-high.
      if (second.IsRegister()) {
        __ addl(first.AsRegisterPairLow<Register>(), second.AsRegisterPairLow<Register>());
        __ adcl(first.AsRegisterPairHigh<Register>(), second.AsRegisterPairHigh<Register>());
      } else {
        __ addl(first.AsRegisterPairLow<Register>(), Address(ESP, second.GetStackIndex()));
        __ adcl(first.AsRegisterPairHigh<Register>(),
                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ addss(first.As<XmmRegister>(), second.As<XmmRegister>());
      } else {
        __ addss(first.As<XmmRegister>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ addsd(first.As<XmmRegister>(), second.As<XmmRegister>());
      } else {
        __ addsd(first.As<XmmRegister>(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
1018
1019void LocationsBuilderX86::VisitSub(HSub* sub) {
1020  LocationSummary* locations =
1021      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1022  switch (sub->GetResultType()) {
1023    case Primitive::kPrimInt:
1024    case Primitive::kPrimLong: {
1025      locations->SetInAt(0, Location::RequiresRegister());
1026      locations->SetInAt(1, Location::Any());
1027      locations->SetOut(Location::SameAsFirstInput());
1028      break;
1029    }
1030
1031    case Primitive::kPrimBoolean:
1032    case Primitive::kPrimByte:
1033    case Primitive::kPrimChar:
1034    case Primitive::kPrimShort:
1035      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1036      break;
1037
1038    default:
1039      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
1040  }
1041}
1042
void InstructionCodeGeneratorX86::VisitSub(HSub* sub) {
  // Emit the subtraction in place: the output shares the first input's
  // register(s); the second operand may be a register, constant, or stack slot.
  LocationSummary* locations = sub->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      DCHECK_EQ(first.As<Register>(),
                locations->Out().As<Register>());
      if (second.IsRegister()) {
        __ subl(first.As<Register>(),
                second.As<Register>());
      } else if (second.IsConstant()) {
        HConstant* instruction = second.GetConstant();
        Immediate imm(instruction->AsIntConstant()->GetValue());
        __ subl(first.As<Register>(), imm);
      } else {
        __ subl(first.As<Register>(),
                Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      DCHECK_EQ(first.AsRegisterPairLow<Register>(),
                locations->Out().AsRegisterPairLow<Register>());
      DCHECK_EQ(first.AsRegisterPairHigh<Register>(),
                locations->Out().AsRegisterPairHigh<Register>());
      // 64-bit subtract as sub-low / subtract-with-borrow-high.
      if (second.IsRegister()) {
        __ subl(first.AsRegisterPairLow<Register>(),
                second.AsRegisterPairLow<Register>());
        __ sbbl(first.AsRegisterPairHigh<Register>(),
                second.AsRegisterPairHigh<Register>());
      } else {
        __ subl(first.AsRegisterPairLow<Register>(),
                Address(ESP, second.GetStackIndex()));
        __ sbbl(first.AsRegisterPairHigh<Register>(),
                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
      }
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
  }
}
1095
1096void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
1097  LocationSummary* locations =
1098      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1099  locations->SetOut(Location::RegisterLocation(EAX));
1100  InvokeRuntimeCallingConvention calling_convention;
1101  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1102  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1103}
1104
void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
  // Call the pAllocObjectWithAccessCheck entry point with the type index in
  // the first argument register and the referring method in the second.
  InvokeRuntimeCallingConvention calling_convention;
  LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));

  __ fs()->call(
      Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocObjectWithAccessCheck)));

  // Map the call site back to its dex pc for the runtime.
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}
1116
1117void LocationsBuilderX86::VisitParameterValue(HParameterValue* instruction) {
1118  LocationSummary* locations =
1119      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1120  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
1121  if (location.IsStackSlot()) {
1122    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1123  } else if (location.IsDoubleStackSlot()) {
1124    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1125  }
1126  locations->SetOut(location);
1127}
1128
void InstructionCodeGeneratorX86::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do: the parameter's value is already at the location computed
  // by the locations builder.
}
1131
1132void LocationsBuilderX86::VisitNot(HNot* instruction) {
1133  LocationSummary* locations =
1134      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1135  locations->SetInAt(0, Location::RequiresRegister());
1136  locations->SetOut(Location::SameAsFirstInput());
1137}
1138
void InstructionCodeGeneratorX86::VisitNot(HNot* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location out = locations->Out();
  DCHECK_EQ(locations->InAt(0).As<Register>(), out.As<Register>());
  // Flip the low bit in place: boolean negation (0 <-> 1).
  __ xorl(out.As<Register>(), Immediate(1));
}
1145
void LocationsBuilderX86::VisitCompare(HCompare* compare) {
  // Both inputs are only read at entry; the -1/0/1 result needs its own
  // core register.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister(), Location::kDiesAtEntry);
  locations->SetInAt(1, Location::Any(), Location::kDiesAtEntry);
  locations->SetOut(Location::RequiresRegister());
}
1153
1154void InstructionCodeGeneratorX86::VisitCompare(HCompare* compare) {
1155  Label greater, done;
1156  LocationSummary* locations = compare->GetLocations();
1157  switch (compare->InputAt(0)->GetType()) {
1158    case Primitive::kPrimLong: {
1159      Label less, greater, done;
1160      Register output = locations->Out().As<Register>();
1161      Location left = locations->InAt(0);
1162      Location right = locations->InAt(1);
1163      if (right.IsRegister()) {
1164        __ cmpl(left.AsRegisterPairHigh<Register>(), right.AsRegisterPairHigh<Register>());
1165      } else {
1166        DCHECK(right.IsDoubleStackSlot());
1167        __ cmpl(left.AsRegisterPairHigh<Register>(),
1168                Address(ESP, right.GetHighStackIndex(kX86WordSize)));
1169      }
1170      __ j(kLess, &less);  // Signed compare.
1171      __ j(kGreater, &greater);  // Signed compare.
1172      if (right.IsRegisterPair()) {
1173        __ cmpl(left.AsRegisterPairLow<Register>(), right.AsRegisterPairLow<Register>());
1174      } else {
1175        DCHECK(right.IsDoubleStackSlot());
1176        __ cmpl(left.AsRegisterPairLow<Register>(), Address(ESP, right.GetStackIndex()));
1177      }
1178      __ movl(output, Immediate(0));
1179      __ j(kEqual, &done);
1180      __ j(kBelow, &less);  // Unsigned compare.
1181
1182      __ Bind(&greater);
1183      __ movl(output, Immediate(1));
1184      __ jmp(&done);
1185
1186      __ Bind(&less);
1187      __ movl(output, Immediate(-1));
1188
1189      __ Bind(&done);
1190      break;
1191    }
1192    default:
1193      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
1194  }
1195}
1196
1197void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
1198  LocationSummary* locations =
1199      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1200  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
1201    locations->SetInAt(i, Location::Any());
1202  }
1203  locations->SetOut(Location::Any());
1204}
1205
void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction) {
  // Phis generate no code of their own; reaching this visitor is a bug.
  LOG(FATAL) << "Unreachable";
}
1209
void LocationsBuilderX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Place the object in a register and the value in a register suitable for
  // the field's width; reference stores additionally need write-barrier temps.
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  Primitive::Type field_type = instruction->GetFieldType();
  bool is_object_type = field_type == Primitive::kPrimNot;
  bool is_byte_type = (field_type == Primitive::kPrimBoolean)
      || (field_type == Primitive::kPrimByte);
  // The register allocator does not support multiple
  // inputs that die at entry with one in a specific register.
  bool dies_at_entry = !is_object_type && !is_byte_type;
  if (is_byte_type) {
    // Ensure the value is in a byte register.
    locations->SetInAt(1, Location::RegisterLocation(EAX), dies_at_entry);
  } else {
    locations->SetInAt(1, Location::RequiresRegister(), dies_at_entry);
  }
  // Temporary registers for the write barrier.
  if (is_object_type) {
    locations->AddTemp(Location::RequiresRegister());
    // Ensure the card is in a byte register.
    locations->AddTemp(Location::RegisterLocation(ECX));
  }
}
1234
void InstructionCodeGeneratorX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Store the value into the field at `offset` in `obj`, using the move width
  // matching the field type; reference stores also mark the GC card.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
  Primitive::Type field_type = instruction->GetFieldType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      ByteRegister value = locations->InAt(1).As<ByteRegister>();
      __ movb(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      Register value = locations->InAt(1).As<Register>();
      __ movw(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register value = locations->InAt(1).As<Register>();
      __ movl(Address(obj, offset), value);

      if (field_type == Primitive::kPrimNot) {
        // Reference store: dirty the card covering `obj` (write barrier).
        Register temp = locations->GetTemp(0).As<Register>();
        Register card = locations->GetTemp(1).As<Register>();
        codegen_->MarkGCCard(temp, card, obj, value);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // A long is stored as two 32-bit moves (not a single atomic access).
      Location value = locations->InAt(1);
      __ movl(Address(obj, offset), value.AsRegisterPairLow<Register>());
      __ movl(Address(obj, kX86WordSize + offset), value.AsRegisterPairHigh<Register>());
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << field_type;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
  }
}
1284
void CodeGeneratorX86::MarkGCCard(Register temp, Register card, Register object, Register value) {
  // Write barrier: after storing `value` into `object`, dirty the card-table
  // entry covering `object`. Storing null needs no barrier, so skip it.
  Label is_null;
  __ testl(value, value);
  __ j(kEqual, &is_null);
  // card = thread-local card table base (read from the TLS segment).
  __ fs()->movl(card, Address::Absolute(Thread::CardTableOffset<kX86WordSize>().Int32Value()));
  // temp = object address >> kCardShift, the index of the object's card.
  __ movl(temp, object);
  __ shrl(temp, Immediate(gc::accounting::CardTable::kCardShift));
  // card_table[index] = low byte of the card base value.
  __ movb(Address(temp, card, TIMES_1, 0),
          X86ManagedRegister::FromCpuRegister(card).AsByteRegister());
  __ Bind(&is_null);
}
1296
1297void LocationsBuilderX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
1298  LocationSummary* locations =
1299      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1300  locations->SetInAt(0, Location::RequiresRegister(), Location::kDiesAtEntry);
1301  locations->SetOut(Location::RequiresRegister());
1302}
1303
void InstructionCodeGeneratorX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Load the field at `offset` from `obj`, widening sub-word types into a
  // full 32-bit register with the appropriate sign/zero extension.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      // Zero-extend: booleans are unsigned.
      Register out = locations->Out().As<Register>();
      __ movzxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimByte: {
      // Sign-extend: bytes are signed.
      Register out = locations->Out().As<Register>();
      __ movsxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimShort: {
      Register out = locations->Out().As<Register>();
      __ movsxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimChar: {
      // Zero-extend: chars are unsigned 16-bit.
      Register out = locations->Out().As<Register>();
      __ movzxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register out = locations->Out().As<Register>();
      __ movl(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimLong: {
      // TODO: support volatile.
      // A long is loaded as two 32-bit moves (not a single atomic access).
      __ movl(locations->Out().AsRegisterPairLow<Register>(), Address(obj, offset));
      __ movl(locations->Out().AsRegisterPairHigh<Register>(), Address(obj, kX86WordSize + offset));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
  }
}
1356
1357void LocationsBuilderX86::VisitNullCheck(HNullCheck* instruction) {
1358  LocationSummary* locations =
1359      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1360  locations->SetInAt(0, Location::Any());
1361  if (instruction->HasUses()) {
1362    locations->SetOut(Location::SameAsFirstInput());
1363  }
1364}
1365
void InstructionCodeGeneratorX86::VisitNullCheck(HNullCheck* instruction) {
  // Compare the reference against null and branch to the null-check slow path
  // when it is null.
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  if (obj.IsRegister()) {
    __ cmpl(obj.As<Register>(), Immediate(0));
  } else if (obj.IsStackSlot()) {
    __ cmpl(Address(ESP, obj.GetStackIndex()), Immediate(0));
  } else {
    // A constant reference here must be the null constant: the check always
    // fails, so jump straight to the slow path.
    DCHECK(obj.IsConstant()) << obj;
    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
    __ jmp(slow_path->GetEntryLabel());
    return;
  }
  __ j(kEqual, slow_path->GetEntryLabel());
}
1385
1386void LocationsBuilderX86::VisitArrayGet(HArrayGet* instruction) {
1387  LocationSummary* locations =
1388      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1389  locations->SetInAt(0, Location::RequiresRegister(), Location::kDiesAtEntry);
1390  locations->SetInAt(
1391      1, Location::RegisterOrConstant(instruction->InputAt(1)), Location::kDiesAtEntry);
1392  locations->SetOut(Location::RequiresRegister());
1393}
1394
void InstructionCodeGeneratorX86::VisitArrayGet(HArrayGet* instruction) {
  // Load an element from `obj` at `index`. A constant index is folded into
  // the displacement; otherwise a scaled-index addressing mode is used.
  // Sub-word elements are widened (movzx/movsx) into a full register.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        __ movzxb(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
      } else {
        __ movzxb(out, Address(obj, index.As<Register>(), TIMES_1, data_offset));
      }
      break;
    }

    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        __ movsxb(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
      } else {
        __ movsxb(out, Address(obj, index.As<Register>(), TIMES_1, data_offset));
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        __ movsxw(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
      } else {
        __ movsxw(out, Address(obj, index.As<Register>(), TIMES_2, data_offset));
      }
      break;
    }

    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        __ movzxw(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
      } else {
        __ movzxw(out, Address(obj, index.As<Register>(), TIMES_2, data_offset));
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        __ movl(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
      } else {
        __ movl(out, Address(obj, index.As<Register>(), TIMES_4, data_offset));
      }
      break;
    }

    case Primitive::kPrimLong: {
      // A long element is loaded as two 32-bit moves (not a single atomic
      // access).
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ movl(out.AsRegisterPairLow<Register>(), Address(obj, offset));
        __ movl(out.AsRegisterPairHigh<Register>(), Address(obj, offset + kX86WordSize));
      } else {
        __ movl(out.AsRegisterPairLow<Register>(),
                Address(obj, index.As<Register>(), TIMES_8, data_offset));
        __ movl(out.AsRegisterPairHigh<Register>(),
                Address(obj, index.As<Register>(), TIMES_8, data_offset + kX86WordSize));
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
  }
}
1486
void LocationsBuilderX86::VisitArraySet(HArraySet* instruction) {
  // Reference stores go through a runtime call (hence kCall and fixed
  // argument registers); every other element type is stored inline.
  Primitive::Type value_type = instruction->GetComponentType();
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      value_type == Primitive::kPrimNot ? LocationSummary::kCall : LocationSummary::kNoCall);

  if (value_type == Primitive::kPrimNot) {
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  } else {
    bool is_byte_type = (value_type == Primitive::kPrimBoolean)
        || (value_type == Primitive::kPrimByte);
    // We need the inputs to be different than the output in case of long operation.
    // In case of a byte operation, the register allocator does not support multiple
    // inputs that die at entry with one in a specific register.
    bool dies_at_entry = value_type != Primitive::kPrimLong && !is_byte_type;
    locations->SetInAt(0, Location::RequiresRegister(), dies_at_entry);
    locations->SetInAt(
        1, Location::RegisterOrConstant(instruction->InputAt(1)), dies_at_entry);
    if (is_byte_type) {
      // Ensure the value is in a byte register.
      locations->SetInAt(2, Location::ByteRegisterOrConstant(
          EAX, instruction->InputAt(2)), dies_at_entry);
    } else {
      locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)), dies_at_entry);
    }
  }
}
1517
1518void InstructionCodeGeneratorX86::VisitArraySet(HArraySet* instruction) {
1519  LocationSummary* locations = instruction->GetLocations();
1520  Register obj = locations->InAt(0).As<Register>();
1521  Location index = locations->InAt(1);
1522  Location value = locations->InAt(2);
1523  Primitive::Type value_type = instruction->GetComponentType();
1524
1525  switch (value_type) {
1526    case Primitive::kPrimBoolean:
1527    case Primitive::kPrimByte: {
1528      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
1529      if (index.IsConstant()) {
1530        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
1531        if (value.IsRegister()) {
1532          __ movb(Address(obj, offset), value.As<ByteRegister>());
1533        } else {
1534          __ movb(Address(obj, offset),
1535                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
1536        }
1537      } else {
1538        if (value.IsRegister()) {
1539          __ movb(Address(obj, index.As<Register>(), TIMES_1, data_offset),
1540                  value.As<ByteRegister>());
1541        } else {
1542          __ movb(Address(obj, index.As<Register>(), TIMES_1, data_offset),
1543                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
1544        }
1545      }
1546      break;
1547    }
1548
1549    case Primitive::kPrimShort:
1550    case Primitive::kPrimChar: {
1551      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
1552      if (index.IsConstant()) {
1553        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
1554        if (value.IsRegister()) {
1555          __ movw(Address(obj, offset), value.As<Register>());
1556        } else {
1557          __ movw(Address(obj, offset),
1558                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
1559        }
1560      } else {
1561        if (value.IsRegister()) {
1562          __ movw(Address(obj, index.As<Register>(), TIMES_2, data_offset),
1563                  value.As<Register>());
1564        } else {
1565          __ movw(Address(obj, index.As<Register>(), TIMES_2, data_offset),
1566                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
1567        }
1568      }
1569      break;
1570    }
1571
1572    case Primitive::kPrimInt: {
1573      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
1574      if (index.IsConstant()) {
1575        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
1576        if (value.IsRegister()) {
1577          __ movl(Address(obj, offset), value.As<Register>());
1578        } else {
1579          __ movl(Address(obj, offset), Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
1580        }
1581      } else {
1582        if (value.IsRegister()) {
1583          __ movl(Address(obj, index.As<Register>(), TIMES_4, data_offset),
1584                  value.As<Register>());
1585        } else {
1586          __ movl(Address(obj, index.As<Register>(), TIMES_4, data_offset),
1587                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
1588        }
1589      }
1590      break;
1591    }
1592
1593    case Primitive::kPrimNot: {
1594      DCHECK(!codegen_->IsLeafMethod());
1595      __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAputObject)));
1596      codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
1597      break;
1598    }
1599
1600    case Primitive::kPrimLong: {
1601      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
1602      if (index.IsConstant()) {
1603        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
1604        if (value.IsRegisterPair()) {
1605          __ movl(Address(obj, offset), value.AsRegisterPairLow<Register>());
1606          __ movl(Address(obj, offset + kX86WordSize), value.AsRegisterPairHigh<Register>());
1607        } else {
1608          DCHECK(value.IsConstant());
1609          int64_t val = value.GetConstant()->AsLongConstant()->GetValue();
1610          __ movl(Address(obj, offset), Immediate(Low32Bits(val)));
1611          __ movl(Address(obj, offset + kX86WordSize), Immediate(High32Bits(val)));
1612        }
1613      } else {
1614        if (value.IsRegisterPair()) {
1615          __ movl(Address(obj, index.As<Register>(), TIMES_8, data_offset),
1616                  value.AsRegisterPairLow<Register>());
1617          __ movl(Address(obj, index.As<Register>(), TIMES_8, data_offset + kX86WordSize),
1618                  value.AsRegisterPairHigh<Register>());
1619        } else {
1620          DCHECK(value.IsConstant());
1621          int64_t val = value.GetConstant()->AsLongConstant()->GetValue();
1622          __ movl(Address(obj, index.As<Register>(), TIMES_8, data_offset),
1623                  Immediate(Low32Bits(val)));
1624          __ movl(Address(obj, index.As<Register>(), TIMES_8, data_offset + kX86WordSize),
1625                  Immediate(High32Bits(val)));
1626        }
1627      }
1628      break;
1629    }
1630
1631    case Primitive::kPrimFloat:
1632    case Primitive::kPrimDouble:
1633      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
1634
1635    case Primitive::kPrimVoid:
1636      LOG(FATAL) << "Unreachable type " << instruction->GetType();
1637  }
1638}
1639
1640void LocationsBuilderX86::VisitArrayLength(HArrayLength* instruction) {
1641  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1642  locations->SetInAt(0, Location::RequiresRegister(), Location::kDiesAtEntry);
1643  locations->SetOut(Location::RequiresRegister());
1644  instruction->SetLocations(locations);
1645}
1646
1647void InstructionCodeGeneratorX86::VisitArrayLength(HArrayLength* instruction) {
1648  LocationSummary* locations = instruction->GetLocations();
1649  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
1650  Register obj = locations->InAt(0).As<Register>();
1651  Register out = locations->Out().As<Register>();
1652  __ movl(out, Address(obj, offset));
1653}
1654
1655void LocationsBuilderX86::VisitBoundsCheck(HBoundsCheck* instruction) {
1656  LocationSummary* locations =
1657      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1658  locations->SetInAt(0, Location::RequiresRegister());
1659  locations->SetInAt(1, Location::RequiresRegister());
1660  if (instruction->HasUses()) {
1661    locations->SetOut(Location::SameAsFirstInput());
1662  }
1663}
1664
1665void InstructionCodeGeneratorX86::VisitBoundsCheck(HBoundsCheck* instruction) {
1666  LocationSummary* locations = instruction->GetLocations();
1667  SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86(
1668      instruction, locations->InAt(0), locations->InAt(1));
1669  codegen_->AddSlowPath(slow_path);
1670
1671  Register index = locations->InAt(0).As<Register>();
1672  Register length = locations->InAt(1).As<Register>();
1673
1674  __ cmpl(index, length);
1675  __ j(kAboveEqual, slow_path->GetEntryLabel());
1676}
1677
void LocationsBuilderX86::VisitTemporary(HTemporary* temp) {
  // A temporary carries no inputs or outputs of its own, so it gets no
  // location summary.
  temp->SetLocations(nullptr);
}
1681
void InstructionCodeGeneratorX86::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator: no code is
  // emitted for a temporary itself.
}
1685
void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction) {
  // A parallel move never reaches the locations builder; hitting this is a
  // compiler bug.
  LOG(FATAL) << "Unreachable";
}
1689
void InstructionCodeGeneratorX86::VisitParallelMove(HParallelMove* instruction) {
  // Delegate emission of the move/swap sequence to the parallel move
  // resolver.
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
1693
void LocationsBuilderX86::VisitSuspendCheck(HSuspendCheck* instruction) {
  // No register inputs or outputs; kCallOnSlowPath marks that the runtime
  // is only entered from the slow path.
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}
1697
1698void InstructionCodeGeneratorX86::VisitSuspendCheck(HSuspendCheck* instruction) {
1699  HBasicBlock* block = instruction->GetBlock();
1700  if (block->GetLoopInformation() != nullptr) {
1701    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
1702    // The back edge will generate the suspend check.
1703    return;
1704  }
1705  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
1706    // The goto will generate the suspend check.
1707    return;
1708  }
1709  GenerateSuspendCheck(instruction, nullptr);
1710}
1711
1712void InstructionCodeGeneratorX86::GenerateSuspendCheck(HSuspendCheck* instruction,
1713                                                       HBasicBlock* successor) {
1714  SuspendCheckSlowPathX86* slow_path =
1715      new (GetGraph()->GetArena()) SuspendCheckSlowPathX86(instruction, successor);
1716  codegen_->AddSlowPath(slow_path);
1717  __ fs()->cmpw(Address::Absolute(
1718      Thread::ThreadFlagsOffset<kX86WordSize>().Int32Value()), Immediate(0));
1719  if (successor == nullptr) {
1720    __ j(kNotEqual, slow_path->GetEntryLabel());
1721    __ Bind(slow_path->GetReturnLabel());
1722  } else {
1723    __ j(kEqual, codegen_->GetLabelOf(successor));
1724    __ jmp(slow_path->GetEntryLabel());
1725  }
1726}
1727
X86Assembler* ParallelMoveResolverX86::GetAssembler() const {
  // The resolver emits into the code generator's assembler.
  return codegen_->GetAssembler();
}
1731
1732void ParallelMoveResolverX86::MoveMemoryToMemory(int dst, int src) {
1733  ScratchRegisterScope ensure_scratch(
1734      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
1735  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
1736  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, src + stack_offset));
1737  __ movl(Address(ESP, dst + stack_offset), static_cast<Register>(ensure_scratch.GetRegister()));
1738}
1739
1740void ParallelMoveResolverX86::EmitMove(size_t index) {
1741  MoveOperands* move = moves_.Get(index);
1742  Location source = move->GetSource();
1743  Location destination = move->GetDestination();
1744
1745  if (source.IsRegister()) {
1746    if (destination.IsRegister()) {
1747      __ movl(destination.As<Register>(), source.As<Register>());
1748    } else {
1749      DCHECK(destination.IsStackSlot());
1750      __ movl(Address(ESP, destination.GetStackIndex()), source.As<Register>());
1751    }
1752  } else if (source.IsStackSlot()) {
1753    if (destination.IsRegister()) {
1754      __ movl(destination.As<Register>(), Address(ESP, source.GetStackIndex()));
1755    } else {
1756      DCHECK(destination.IsStackSlot());
1757      MoveMemoryToMemory(destination.GetStackIndex(),
1758                         source.GetStackIndex());
1759    }
1760  } else if (source.IsConstant()) {
1761    HIntConstant* instruction = source.GetConstant()->AsIntConstant();
1762    Immediate imm(instruction->AsIntConstant()->GetValue());
1763    if (destination.IsRegister()) {
1764      __ movl(destination.As<Register>(), imm);
1765    } else {
1766      __ movl(Address(ESP, destination.GetStackIndex()), imm);
1767    }
1768  } else {
1769    LOG(FATAL) << "Unimplemented";
1770  }
1771}
1772
1773void ParallelMoveResolverX86::Exchange(Register reg, int mem) {
1774  Register suggested_scratch = reg == EAX ? EBX : EAX;
1775  ScratchRegisterScope ensure_scratch(
1776      this, reg, suggested_scratch, codegen_->GetNumberOfCoreRegisters());
1777
1778  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
1779  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset));
1780  __ movl(Address(ESP, mem + stack_offset), reg);
1781  __ movl(reg, static_cast<Register>(ensure_scratch.GetRegister()));
1782}
1783
1784void ParallelMoveResolverX86::Exchange(int mem1, int mem2) {
1785  ScratchRegisterScope ensure_scratch1(
1786      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
1787
1788  Register suggested_scratch = ensure_scratch1.GetRegister() == EAX ? EBX : EAX;
1789  ScratchRegisterScope ensure_scratch2(
1790      this, ensure_scratch1.GetRegister(), suggested_scratch, codegen_->GetNumberOfCoreRegisters());
1791
1792  int stack_offset = ensure_scratch1.IsSpilled() ? kX86WordSize : 0;
1793  stack_offset += ensure_scratch2.IsSpilled() ? kX86WordSize : 0;
1794  __ movl(static_cast<Register>(ensure_scratch1.GetRegister()), Address(ESP, mem1 + stack_offset));
1795  __ movl(static_cast<Register>(ensure_scratch2.GetRegister()), Address(ESP, mem2 + stack_offset));
1796  __ movl(Address(ESP, mem2 + stack_offset), static_cast<Register>(ensure_scratch1.GetRegister()));
1797  __ movl(Address(ESP, mem1 + stack_offset), static_cast<Register>(ensure_scratch2.GetRegister()));
1798}
1799
1800void ParallelMoveResolverX86::EmitSwap(size_t index) {
1801  MoveOperands* move = moves_.Get(index);
1802  Location source = move->GetSource();
1803  Location destination = move->GetDestination();
1804
1805  if (source.IsRegister() && destination.IsRegister()) {
1806    __ xchgl(destination.As<Register>(), source.As<Register>());
1807  } else if (source.IsRegister() && destination.IsStackSlot()) {
1808    Exchange(source.As<Register>(), destination.GetStackIndex());
1809  } else if (source.IsStackSlot() && destination.IsRegister()) {
1810    Exchange(destination.As<Register>(), source.GetStackIndex());
1811  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
1812    Exchange(destination.GetStackIndex(), source.GetStackIndex());
1813  } else {
1814    LOG(FATAL) << "Unimplemented";
1815  }
1816}
1817
void ParallelMoveResolverX86::SpillScratch(int reg) {
  // Save a scratch register on the stack before the resolver clobbers it.
  __ pushl(static_cast<Register>(reg));
}
1821
void ParallelMoveResolverX86::RestoreScratch(int reg) {
  // Restore a scratch register previously saved by SpillScratch().
  __ popl(static_cast<Register>(reg));
}
1825
1826}  // namespace x86
1827}  // namespace art
1828