code_generator_x86.cc revision 01ef345767ea609417fc511e42007705c9667546
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86.h"

#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "mirror/array.h"
#include "mirror/art_method.h"
#include "mirror/class.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86/assembler_x86.h"
#include "utils/x86/managed_register_x86.h"

namespace art {

x86::X86ManagedRegister Location::AsX86() const {
  return reg().AsX86();
}

namespace x86 {

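// When false, the stack overflow check is implicit: GenerateFrameEntry pokes
// the stack below the reserved region and relies on the fault handler to
// raise a StackOverflowError. When true, an explicit comparison of ESP
// against the thread's stack end is emitted instead.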
static constexpr bool kExplicitStackOverflowCheck = false;

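// Only the return address has been pushed when a method is entered; it is
// modeled as the spill of a fake register (see GenerateFrameEntry).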
static constexpr int kNumberOfPushedRegistersAtEntry = 1;
static constexpr int kCurrentMethodStackOffset = 0;

static Location X86CpuLocation(Register reg) {
  return Location::RegisterLocation(X86ManagedRegister::FromCpuRegister(reg));
}

static constexpr Register kRuntimeParameterCoreRegisters[] = { EAX, ECX, EDX };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static constexpr XmmRegister kRuntimeParameterFpuRegisters[] = { };
static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;

class InvokeRuntimeCallingConvention : public CallingConvention<Register, XmmRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

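// Within the slow paths below, `__` routes through the codegen's assembler.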
#define __ reinterpret_cast<X86Assembler*>(codegen->GetAssembler())->

class NullCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86(HNullCheck* instruction) : instruction_(instruction) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowNullPointer)));
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86);
};

class StackOverflowCheckSlowPathX86 : public SlowPathCode {
 public:
  StackOverflowCheckSlowPathX86() {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    __ addl(ESP,
            Immediate(codegen->GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
    __ fs()->jmp(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowStackOverflow)));
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathX86);
};

class BoundsCheckSlowPathX86 : public SlowPathCode {
 public:
  BoundsCheckSlowPathX86(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86* x86_codegen = reinterpret_cast<CodeGeneratorX86*>(codegen);
    __ Bind(GetEntryLabel());
    InvokeRuntimeCallingConvention calling_convention;
    x86_codegen->Move32(X86CpuLocation(calling_convention.GetRegisterAt(0)), index_location_);
    x86_codegen->Move32(X86CpuLocation(calling_convention.GetRegisterAt(1)), length_location_);
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowArrayBounds)));
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86);
};

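// Slow path for HSuspendCheck: saves the live registers, calls pTestSuspend,
// restores the registers, then either resumes right after the check
// (successor_ == nullptr) or branches directly to successor_.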
class SuspendCheckSlowPathX86 : public SlowPathCode {
 public:
  SuspendCheckSlowPathX86(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pTestSuspend)));
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86);
};

#undef __
#define __ reinterpret_cast<X86Assembler*>(GetAssembler())->

inline Condition X86Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return kEqual;
}

void CodeGeneratorX86::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << X86ManagedRegister::FromCpuRegister(Register(reg));
}

void CodeGeneratorX86::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << X86ManagedRegister::FromXmmRegister(XmmRegister(reg));
}

void CodeGeneratorX86::SaveCoreRegister(Location stack_location, uint32_t reg_id) {
  __ movl(Address(ESP, stack_location.GetStackIndex()), static_cast<Register>(reg_id));
}

void CodeGeneratorX86::RestoreCoreRegister(Location stack_location, uint32_t reg_id) {
  __ movl(static_cast<Register>(reg_id), Address(ESP, stack_location.GetStackIndex()));
}

CodeGeneratorX86::CodeGeneratorX86(HGraph* graph)
    : CodeGenerator(graph, kNumberOfRegIds),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this) {}

size_t CodeGeneratorX86::FrameEntrySpillSize() const {
  return kNumberOfPushedRegistersAtEntry * kX86WordSize;
}

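// The `blocked_registers` array is indexed by allocation id: core registers
// come first, then XMM registers; register-pair ids live past
// kNumberOfAllocIds.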
static bool* GetBlockedRegisterPairs(bool* blocked_registers) {
  return blocked_registers + kNumberOfAllocIds;
}

static bool* GetBlockedXmmRegisters(bool* blocked_registers) {
  return blocked_registers + kNumberOfCpuRegisters;
}

ManagedRegister CodeGeneratorX86::AllocateFreeRegister(Primitive::Type type,
                                                       bool* blocked_registers) const {
  switch (type) {
    case Primitive::kPrimLong: {
      bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);
      size_t reg = AllocateFreeRegisterInternal(blocked_register_pairs, kNumberOfRegisterPairs);
      X86ManagedRegister pair =
          X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      blocked_registers[pair.AsRegisterPairLow()] = true;
      blocked_registers[pair.AsRegisterPairHigh()] = true;
      // Block all other register pairs that share a register with `pair`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        X86ManagedRegister current =
            X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == pair.AsRegisterPairLow()
            || current.AsRegisterPairLow() == pair.AsRegisterPairHigh()
            || current.AsRegisterPairHigh() == pair.AsRegisterPairLow()
            || current.AsRegisterPairHigh() == pair.AsRegisterPairHigh()) {
          blocked_register_pairs[i] = true;
        }
      }
      return pair;
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register reg = static_cast<Register>(
          AllocateFreeRegisterInternal(blocked_registers, kNumberOfCpuRegisters));
      // Block all register pairs that contain `reg`.
      bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        X86ManagedRegister current =
            X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs[i] = true;
        }
      }
      return X86ManagedRegister::FromCpuRegister(reg);
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      XmmRegister reg = static_cast<XmmRegister>(AllocateFreeRegisterInternal(
          GetBlockedXmmRegisters(blocked_registers), kNumberOfXmmRegisters));
      return X86ManagedRegister::FromXmmRegister(reg);
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return ManagedRegister::NoRegister();
}

void CodeGeneratorX86::SetupBlockedRegisters(bool* blocked_registers) const {
  bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);

  // Don't allocate the ECX_EDX pair: it is reserved for Dalvik-style argument
  // passing.
  blocked_register_pairs[ECX_EDX] = true;

  // Stack register is always reserved.
  blocked_registers[ESP] = true;

  // TODO: We currently don't use Quick's callee saved registers.
  blocked_registers[EBP] = true;
  blocked_registers[ESI] = true;
  blocked_registers[EDI] = true;
  blocked_register_pairs[EAX_EDI] = true;
  blocked_register_pairs[EDX_EDI] = true;
  blocked_register_pairs[ECX_EDI] = true;
  blocked_register_pairs[EBX_EDI] = true;
}

size_t CodeGeneratorX86::GetNumberOfRegisters() const {
  return kNumberOfRegIds;
}

InstructionCodeGeneratorX86::InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}

void CodeGeneratorX86::GenerateFrameEntry() {
  // Create a fake register to mimic Quick.
  static const int kFakeReturnRegister = 8;
  core_spill_mask_ |= (1 << kFakeReturnRegister);

  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86);
  if (!skip_overflow_check && !kExplicitStackOverflowCheck) {
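    // Implicit stack overflow check: read below the reserved stack region. If
    // the method would overflow, the access faults and the runtime turns the
    // fault into a StackOverflowError.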
    __ testl(EAX, Address(ESP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86))));
    RecordPcInfo(nullptr, 0);
  }

  // The return PC has already been pushed on the stack.
  __ subl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));

  if (!skip_overflow_check && kExplicitStackOverflowCheck) {
    SlowPathCode* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathX86();
    AddSlowPath(slow_path);

    __ fs()->cmpl(ESP, Address::Absolute(Thread::StackEndOffset<kX86WordSize>()));
    __ j(kLess, slow_path->GetEntryLabel());
  }

  __ movl(Address(ESP, kCurrentMethodStackOffset), EAX);
}

void CodeGeneratorX86::GenerateFrameExit() {
  __ addl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
}

void CodeGeneratorX86::Bind(Label* label) {
  __ Bind(label);
}

void InstructionCodeGeneratorX86::LoadCurrentMethod(Register reg) {
  __ movl(reg, Address(ESP, kCurrentMethodStackOffset));
}

Location CodeGeneratorX86::GetStackLocation(HLoadLocal* load) const {
  switch (load->GetType()) {
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << load->GetType();
  }

  LOG(FATAL) << "Unreachable";
  return Location();
}

Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return X86CpuLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble: {
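      // Longs and doubles consume two GPR slots: either a full register pair,
      // a split between the last register and the stack (modeled as a
      // QuickParameter), or a double stack slot.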
      uint32_t index = gp_index_;
      gp_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(
            calling_convention.GetRegisterPairAt(index)));
      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
        return Location::QuickParameter(index);
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}

void CodeGeneratorX86::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
    } else if (source.IsFpuRegister()) {
      __ movd(destination.AsX86().AsCpuRegister(), source.AsX86().AsXmmRegister());
    } else {
      DCHECK(source.IsStackSlot());
      __ movl(destination.AsX86().AsCpuRegister(), Address(ESP, source.GetStackIndex()));
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ movd(destination.AsX86().AsXmmRegister(), source.AsX86().AsCpuRegister());
    } else if (source.IsFpuRegister()) {
      __ movaps(destination.AsX86().AsXmmRegister(), source.AsX86().AsXmmRegister());
    } else {
      DCHECK(source.IsStackSlot());
      __ movss(destination.AsX86().AsXmmRegister(), Address(ESP, source.GetStackIndex()));
    }
  } else {
    DCHECK(destination.IsStackSlot());
    if (source.IsRegister()) {
      __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsCpuRegister());
    } else if (source.IsFpuRegister()) {
      __ movss(Address(ESP, destination.GetStackIndex()), source.AsX86().AsXmmRegister());
    } else {
      DCHECK(source.IsStackSlot());
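      // Stack-to-stack move: go through push/pop so no scratch register is
      // needed.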
      __ pushl(Address(ESP, source.GetStackIndex()));
      __ popl(Address(ESP, destination.GetStackIndex()));
    }
  }
}

void CodeGeneratorX86::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movl(destination.AsX86().AsRegisterPairLow(), source.AsX86().AsRegisterPairLow());
      __ movl(destination.AsX86().AsRegisterPairHigh(), source.AsX86().AsRegisterPairHigh());
    } else if (source.IsFpuRegister()) {
      LOG(FATAL) << "Unimplemented";
    } else if (source.IsQuickParameter()) {
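      // A quick parameter splits a long between the last argument register
      // (low word) and the caller's stack area (high word).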
      uint32_t argument_index = source.GetQuickParameterIndex();
      InvokeDexCallingConvention calling_convention;
      __ movl(destination.AsX86().AsRegisterPairLow(),
              calling_convention.GetRegisterAt(argument_index));
      __ movl(destination.AsX86().AsRegisterPairHigh(), Address(ESP,
          calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movl(destination.AsX86().AsRegisterPairLow(), Address(ESP, source.GetStackIndex()));
      __ movl(destination.AsX86().AsRegisterPairHigh(),
              Address(ESP, source.GetHighStackIndex(kX86WordSize)));
    }
  } else if (destination.IsQuickParameter()) {
    InvokeDexCallingConvention calling_convention;
    uint32_t argument_index = destination.GetQuickParameterIndex();
    if (source.IsRegister()) {
      __ movl(calling_convention.GetRegisterAt(argument_index), source.AsX86().AsRegisterPairLow());
      __ movl(Address(ESP, calling_convention.GetStackOffsetOf(argument_index + 1)),
              source.AsX86().AsRegisterPairHigh());
    } else if (source.IsFpuRegister()) {
      LOG(FATAL) << "Unimplemented";
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movl(calling_convention.GetRegisterAt(argument_index),
              Address(ESP, source.GetStackIndex()));
      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
      __ popl(Address(ESP, calling_convention.GetStackOffsetOf(argument_index + 1)));
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsDoubleStackSlot()) {
      __ movsd(destination.AsX86().AsXmmRegister(), Address(ESP, source.GetStackIndex()));
    } else {
      LOG(FATAL) << "Unimplemented";
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsRegisterPairLow());
      __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
              source.AsX86().AsRegisterPairHigh());
    } else if (source.IsQuickParameter()) {
      InvokeDexCallingConvention calling_convention;
      uint32_t argument_index = source.GetQuickParameterIndex();
      __ movl(Address(ESP, destination.GetStackIndex()),
              calling_convention.GetRegisterAt(argument_index));
      DCHECK_EQ(calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize(),
                static_cast<size_t>(destination.GetHighStackIndex(kX86WordSize)));
    } else if (source.IsFpuRegister()) {
      __ movsd(Address(ESP, destination.GetStackIndex()), source.AsX86().AsXmmRegister());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ pushl(Address(ESP, source.GetStackIndex()));
      __ popl(Address(ESP, destination.GetStackIndex()));
      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
      __ popl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)));
    }
  }
}

void CodeGeneratorX86::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  if (instruction->AsIntConstant() != nullptr) {
    Immediate imm(instruction->AsIntConstant()->GetValue());
    if (location.IsRegister()) {
      __ movl(location.AsX86().AsCpuRegister(), imm);
    } else {
      __ movl(Address(ESP, location.GetStackIndex()), imm);
    }
  } else if (instruction->AsLongConstant() != nullptr) {
    int64_t value = instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      __ movl(location.AsX86().AsRegisterPairLow(), Immediate(Low32Bits(value)));
      __ movl(location.AsX86().AsRegisterPairHigh(), Immediate(High32Bits(value)));
    } else {
      __ movl(Address(ESP, location.GetStackIndex()), Immediate(Low32Bits(value)));
      __ movl(Address(ESP, location.GetHighStackIndex(kX86WordSize)), Immediate(High32Bits(value)));
    }
  } else if (instruction->AsLoadLocal() != nullptr) {
    int slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(slot));
        break;

      default:
        LOG(FATAL) << "Unimplemented local type " << instruction->GetType();
    }
  } else {
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, instruction->GetLocations()->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, instruction->GetLocations()->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}

void LocationsBuilderX86::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderX86::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitExit(HExit* exit) {
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ int3();
  }
}

void LocationsBuilderX86::VisitIf(HIf* if_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
  HInstruction* cond = if_instr->InputAt(0);
  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::Any(), Location::kDiesAtEntry);
  }
}

void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
    // Moves do not affect the eflags register, so if the condition is evaluated
    // just before the if, we don't need to evaluate it again.
    if (!cond->IsCondition() || !cond->AsCondition()->IsBeforeWhenDisregardMoves(if_instr)) {
      // Materialized condition, compare against 0.
      Location lhs = if_instr->GetLocations()->InAt(0);
      if (lhs.IsRegister()) {
        __ cmpl(lhs.AsX86().AsCpuRegister(), Immediate(0));
      } else {
        __ cmpl(Address(ESP, lhs.GetStackIndex()), Immediate(0));
      }
    }
    __ j(kNotEqual, codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
  } else {
    Location lhs = cond->GetLocations()->InAt(0);
    Location rhs = cond->GetLocations()->InAt(1);
    // LHS is guaranteed to be in a register (see LocationsBuilderX86::VisitCondition).
    if (rhs.IsRegister()) {
      __ cmpl(lhs.AsX86().AsCpuRegister(), rhs.AsX86().AsCpuRegister());
    } else if (rhs.IsConstant()) {
      HIntConstant* instruction = rhs.GetConstant()->AsIntConstant();
      Immediate imm(instruction->GetValue());
      __ cmpl(lhs.AsX86().AsCpuRegister(), imm);
    } else {
      __ cmpl(lhs.AsX86().AsCpuRegister(), Address(ESP, rhs.GetStackIndex()));
    }
    __ j(X86Condition(cond->AsCondition()->GetCondition()),
         codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
  }
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
    __ jmp(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}

void LocationsBuilderX86::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderX86::VisitLoadLocal(HLoadLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
}

void LocationsBuilderX86::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unknown local type " << store->InputAt(1)->GetType();
  }
  store->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitStoreLocal(HStoreLocal* store) {
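  // Nothing to do: the value already sits in the local's stack slot, as
  // required by the locations set above.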
}

void LocationsBuilderX86::VisitCondition(HCondition* comp) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister(), Location::kDiesAtEntry);
  locations->SetInAt(1, Location::Any(), Location::kDiesAtEntry);
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorX86::VisitCondition(HCondition* comp) {
  if (comp->NeedsMaterialization()) {
    LocationSummary* locations = comp->GetLocations();
    Register reg = locations->Out().AsX86().AsCpuRegister();
    // Clear register: setcc only sets the low byte.
    __ xorl(reg, reg);
    if (locations->InAt(1).IsRegister()) {
      __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(),
              locations->InAt(1).AsX86().AsCpuRegister());
    } else if (locations->InAt(1).IsConstant()) {
      HConstant* instruction = locations->InAt(1).GetConstant();
      Immediate imm(instruction->AsIntConstant()->GetValue());
      __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(), imm);
    } else {
      __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(),
              Address(ESP, locations->InAt(1).GetStackIndex()));
    }
    __ setb(X86Condition(comp->GetCondition()), reg);
  }
}

void LocationsBuilderX86::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant) {
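  // Will be generated at use site.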
}

void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
}

void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret) {
  codegen_->GenerateFrameExit();
  __ ret();
}

void LocationsBuilderX86::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(0, X86CpuLocation(EAX));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(
          0, Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(EAX_EDX)));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(
          0, Location::FpuRegisterLocation(X86ManagedRegister::FromXmmRegister(XMM0)));
      break;

    default:
      LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
  }
}

void InstructionCodeGeneratorX86::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86().AsCpuRegister(), EAX);
        break;

      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86().AsRegisterPair(), EAX_EDX);
        break;

      case Primitive::kPrimFloat:
      case Primitive::kPrimDouble:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86().AsXmmRegister(), XMM0);
        break;

      default:
        LOG(FATAL) << "Unknown return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
  __ ret();
}

void LocationsBuilderX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).AsX86().AsCpuRegister();
  uint32_t heap_reference_size = sizeof(mirror::HeapReference<mirror::Object>);
  size_t index_in_cache = mirror::Array::DataOffset(heap_reference_size).Int32Value() +
      invoke->GetIndexInDexCache() * kX86WordSize;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ movl(temp, Address(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()));
  // temp = temp[index_in_cache]
  __ movl(temp, Address(temp, index_in_cache));
  // (temp + offset_of_quick_compiled_code)()
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  HandleInvoke(invoke);
}

void LocationsBuilderX86::HandleInvoke(HInvoke* invoke) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
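  // Temporary register holding the target ArtMethod*; EAX is also where the
  // managed calling convention expects the current method.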
  locations->AddTemp(X86CpuLocation(EAX));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  switch (invoke->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetOut(X86CpuLocation(EAX));
      break;

    case Primitive::kPrimLong:
      locations->SetOut(Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(EAX_EDX)));
      break;

    case Primitive::kPrimVoid:
      break;

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      locations->SetOut(Location::FpuRegisterLocation(X86ManagedRegister::FromXmmRegister(XMM0)));
      break;
  }

  invoke->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).AsX86().AsCpuRegister();
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ movl(temp, Address(ESP, receiver.GetStackIndex()));
    __ movl(temp, Address(temp, class_offset));
  } else {
    __ movl(temp, Address(receiver.AsX86().AsCpuRegister(), class_offset));
  }
  // temp = temp->GetMethodAt(method_offset);
  __ movl(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderX86::VisitAdd(HAdd* add) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;
  }
}

void InstructionCodeGeneratorX86::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      DCHECK_EQ(first.AsX86().AsCpuRegister(), locations->Out().AsX86().AsCpuRegister());
      if (second.IsRegister()) {
        __ addl(first.AsX86().AsCpuRegister(), second.AsX86().AsCpuRegister());
      } else if (second.IsConstant()) {
        HConstant* instruction = second.GetConstant();
        Immediate imm(instruction->AsIntConstant()->GetValue());
        __ addl(first.AsX86().AsCpuRegister(), imm);
      } else {
        __ addl(first.AsX86().AsCpuRegister(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      DCHECK_EQ(first.AsX86().AsRegisterPair(),
                locations->Out().AsX86().AsRegisterPair());
      if (second.IsRegister()) {
        __ addl(first.AsX86().AsRegisterPairLow(), second.AsX86().AsRegisterPairLow());
        __ adcl(first.AsX86().AsRegisterPairHigh(), second.AsX86().AsRegisterPairHigh());
      } else {
        __ addl(first.AsX86().AsRegisterPairLow(), Address(ESP, second.GetStackIndex()));
        __ adcl(first.AsX86().AsRegisterPairHigh(),
                Address(ESP, second.GetHighStackIndex(kX86WordSize)));
      }
      break;
    }

    case Primitive::kPrimFloat: {
      if (second.IsFpuRegister()) {
        __ addss(first.AsX86().AsXmmRegister(), second.AsX86().AsXmmRegister());
      } else {
        __ addss(first.AsX86().AsXmmRegister(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimDouble: {
      if (second.IsFpuRegister()) {
        __ addsd(first.AsX86().AsXmmRegister(), second.AsX86().AsXmmRegister());
      } else {
        __ addsd(first.AsX86().AsXmmRegister(), Address(ESP, second.GetStackIndex()));
      }
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}

void LocationsBuilderX86::VisitSub(HSub* sub) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
  }
}

void InstructionCodeGeneratorX86::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(),
                locations->Out().AsX86().AsCpuRegister());
      if (locations->InAt(1).IsRegister()) {
        __ subl(locations->InAt(0).AsX86().AsCpuRegister(),
                locations->InAt(1).AsX86().AsCpuRegister());
      } else if (locations->InAt(1).IsConstant()) {
        HConstant* instruction = locations->InAt(1).GetConstant();
        Immediate imm(instruction->AsIntConstant()->GetValue());
        __ subl(locations->InAt(0).AsX86().AsCpuRegister(), imm);
      } else {
        __ subl(locations->InAt(0).AsX86().AsCpuRegister(),
                Address(ESP, locations->InAt(1).GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      DCHECK_EQ(locations->InAt(0).AsX86().AsRegisterPair(),
                locations->Out().AsX86().AsRegisterPair());
      if (locations->InAt(1).IsRegister()) {
        __ subl(locations->InAt(0).AsX86().AsRegisterPairLow(),
                locations->InAt(1).AsX86().AsRegisterPairLow());
        __ sbbl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
                locations->InAt(1).AsX86().AsRegisterPairHigh());
      } else {
        __ subl(locations->InAt(0).AsX86().AsRegisterPairLow(),
                Address(ESP, locations->InAt(1).GetStackIndex()));
        __ sbbl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
                Address(ESP, locations->InAt(1).GetHighStackIndex(kX86WordSize)));
      }
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
  }
}

void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  locations->SetOut(X86CpuLocation(EAX));
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(X86CpuLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(X86CpuLocation(calling_convention.GetRegisterAt(1)));
}

void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));

  __ fs()->call(
      Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocObjectWithAccessCheck)));

  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}

void LocationsBuilderX86::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorX86::VisitParameterValue(HParameterValue* instruction) {
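  // Nothing to do: the parameter is already at its location.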
}

void LocationsBuilderX86::VisitNot(HNot* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

void InstructionCodeGeneratorX86::VisitNot(HNot* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Location out = locations->Out();
  DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(), out.AsX86().AsCpuRegister());
  __ xorl(out.AsX86().AsCpuRegister(), Immediate(1));
}

void LocationsBuilderX86::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister(), Location::kDiesAtEntry);
  locations->SetInAt(1, Location::Any(), Location::kDiesAtEntry);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorX86::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  switch (compare->InputAt(0)->GetType()) {
    case Primitive::kPrimLong: {
      Label less, greater, done;
      Register output = locations->Out().AsX86().AsCpuRegister();
      X86ManagedRegister left = locations->InAt(0).AsX86();
      Location right = locations->InAt(1);
      if (right.IsRegister()) {
        __ cmpl(left.AsRegisterPairHigh(), right.AsX86().AsRegisterPairHigh());
      } else {
        DCHECK(right.IsDoubleStackSlot());
        __ cmpl(left.AsRegisterPairHigh(), Address(ESP, right.GetHighStackIndex(kX86WordSize)));
      }
      __ j(kLess, &less);  // Signed compare.
      __ j(kGreater, &greater);  // Signed compare.
      if (right.IsRegister()) {
        __ cmpl(left.AsRegisterPairLow(), right.AsX86().AsRegisterPairLow());
      } else {
        DCHECK(right.IsDoubleStackSlot());
        __ cmpl(left.AsRegisterPairLow(), Address(ESP, right.GetStackIndex()));
      }
      __ movl(output, Immediate(0));
      __ j(kEqual, &done);
      __ j(kBelow, &less);  // Unsigned compare.

      __ Bind(&greater);
      __ movl(output, Immediate(1));
      __ jmp(&done);

      __ Bind(&less);
      __ movl(output, Immediate(-1));

      __ Bind(&done);
      break;
    }
    default:
      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
  }
}

void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction) {
  LOG(FATAL) << "Unreachable";
}

void LocationsBuilderX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  Primitive::Type field_type = instruction->GetFieldType();
  bool is_object_type = field_type == Primitive::kPrimNot;
  bool is_byte_type = (field_type == Primitive::kPrimBoolean)
      || (field_type == Primitive::kPrimByte);
  // The register allocator does not support multiple
  // inputs that die at entry with one in a specific register.
  bool dies_at_entry = !is_object_type && !is_byte_type;
  if (is_byte_type) {
    // Ensure the value is in a byte register.
    locations->SetInAt(1, X86CpuLocation(EAX), dies_at_entry);
  } else {
    locations->SetInAt(1, Location::RequiresRegister(), dies_at_entry);
  }
  // Temporary registers for the write barrier.
  if (is_object_type) {
    locations->AddTemp(Location::RequiresRegister());
    // Ensure the card is in a byte register.
    locations->AddTemp(X86CpuLocation(ECX));
  }
}

void InstructionCodeGeneratorX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsX86().AsCpuRegister();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
  Primitive::Type field_type = instruction->GetFieldType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      ByteRegister value = locations->InAt(1).AsX86().AsByteRegister();
      __ movb(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      Register value = locations->InAt(1).AsX86().AsCpuRegister();
      __ movw(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register value = locations->InAt(1).AsX86().AsCpuRegister();
      __ movl(Address(obj, offset), value);

      if (field_type == Primitive::kPrimNot) {
        Register temp = locations->GetTemp(0).AsX86().AsCpuRegister();
        Register card = locations->GetTemp(1).AsX86().AsCpuRegister();
        codegen_->MarkGCCard(temp, card, obj, value);
      }
      break;
    }

    case Primitive::kPrimLong: {
      X86ManagedRegister value = locations->InAt(1).AsX86();
      __ movl(Address(obj, offset), value.AsRegisterPairLow());
      __ movl(Address(obj, kX86WordSize + offset), value.AsRegisterPairHigh());
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << field_type;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
  }
}

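// Write barrier: after a reference is stored into `object`, dirty the card
// covering the object so the garbage collector rescans it. Null stores are
// skipped since they never introduce a reference the collector must trace.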
1286void CodeGeneratorX86::MarkGCCard(Register temp, Register card, Register object, Register value) {
1287  Label is_null;
1288  __ testl(value, value);
1289  __ j(kEqual, &is_null);
1290  __ fs()->movl(card, Address::Absolute(Thread::CardTableOffset<kX86WordSize>().Int32Value()));
1291  __ movl(temp, object);
1292  __ shrl(temp, Immediate(gc::accounting::CardTable::kCardShift));
1293  __ movb(Address(temp, card, TIMES_1, 0),
1294          X86ManagedRegister::FromCpuRegister(card).AsByteRegister());
1295  __ Bind(&is_null);
1296}
1297
1298void LocationsBuilderX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
1299  LocationSummary* locations =
1300      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1301  locations->SetInAt(0, Location::RequiresRegister(), Location::kDiesAtEntry);
1302  locations->SetOut(Location::RequiresRegister());
1303}
1304
1305void InstructionCodeGeneratorX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
1306  LocationSummary* locations = instruction->GetLocations();
1307  Register obj = locations->InAt(0).AsX86().AsCpuRegister();
1308  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
1309
1310  switch (instruction->GetType()) {
1311    case Primitive::kPrimBoolean: {
1312      Register out = locations->Out().AsX86().AsCpuRegister();
1313      __ movzxb(out, Address(obj, offset));
1314      break;
1315    }
1316
1317    case Primitive::kPrimByte: {
1318      Register out = locations->Out().AsX86().AsCpuRegister();
1319      __ movsxb(out, Address(obj, offset));
1320      break;
1321    }
1322
1323    case Primitive::kPrimShort: {
1324      Register out = locations->Out().AsX86().AsCpuRegister();
1325      __ movsxw(out, Address(obj, offset));
1326      break;
1327    }
1328
1329    case Primitive::kPrimChar: {
1330      Register out = locations->Out().AsX86().AsCpuRegister();
1331      __ movzxw(out, Address(obj, offset));
1332      break;
1333    }
1334
1335    case Primitive::kPrimInt:
1336    case Primitive::kPrimNot: {
1337      Register out = locations->Out().AsX86().AsCpuRegister();
1338      __ movl(out, Address(obj, offset));
1339      break;
1340    }
1341
1342    case Primitive::kPrimLong: {
1343      // TODO: support volatile.
1344      X86ManagedRegister out = locations->Out().AsX86();
1345      __ movl(out.AsRegisterPairLow(), Address(obj, offset));
1346      __ movl(out.AsRegisterPairHigh(), Address(obj, kX86WordSize + offset));
1347      break;
1348    }
1349
1350    case Primitive::kPrimFloat:
1351    case Primitive::kPrimDouble:
1352      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
1353
1354    case Primitive::kPrimVoid:
1355      LOG(FATAL) << "Unreachable type " << instruction->GetType();
1356  }
1357}
1358
1359void LocationsBuilderX86::VisitNullCheck(HNullCheck* instruction) {
1360  LocationSummary* locations =
1361      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1362  locations->SetInAt(0, Location::Any());
1363  if (instruction->HasUses()) {
1364    locations->SetOut(Location::SameAsFirstInput());
1365  }
1366}
1367
1368void InstructionCodeGeneratorX86::VisitNullCheck(HNullCheck* instruction) {
1369  SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86(instruction);
1370  codegen_->AddSlowPath(slow_path);
1371
1372  LocationSummary* locations = instruction->GetLocations();
1373  Location obj = locations->InAt(0);
1374
1375  if (obj.IsRegister()) {
1376    __ cmpl(obj.AsX86().AsCpuRegister(), Immediate(0));
1377  } else if (obj.IsStackSlot()) {
1378    __ cmpl(Address(ESP, obj.GetStackIndex()), Immediate(0));
1379  } else {
1380    DCHECK(obj.IsConstant()) << obj;
1381    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
1382    __ jmp(slow_path->GetEntryLabel());
1383    return;
1384  }
1385  __ j(kEqual, slow_path->GetEntryLabel());
1386}
1387
1388void LocationsBuilderX86::VisitArrayGet(HArrayGet* instruction) {
1389  LocationSummary* locations =
1390      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1391  locations->SetInAt(0, Location::RequiresRegister(), Location::kDiesAtEntry);
1392  locations->SetInAt(
1393      1, Location::RegisterOrConstant(instruction->InputAt(1)), Location::kDiesAtEntry);
1394  locations->SetOut(Location::RequiresRegister());
1395}
1396
void InstructionCodeGeneratorX86::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsX86().AsCpuRegister();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().AsX86().AsCpuRegister();
      if (index.IsConstant()) {
        __ movzxb(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
      } else {
        __ movzxb(out, Address(obj, index.AsX86().AsCpuRegister(), TIMES_1, data_offset));
      }
      break;
    }

    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().AsX86().AsCpuRegister();
      if (index.IsConstant()) {
        __ movsxb(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
      } else {
        __ movsxb(out, Address(obj, index.AsX86().AsCpuRegister(), TIMES_1, data_offset));
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().AsX86().AsCpuRegister();
      if (index.IsConstant()) {
        __ movsxw(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
      } else {
        __ movsxw(out, Address(obj, index.AsX86().AsCpuRegister(), TIMES_2, data_offset));
      }
      break;
    }

    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().AsX86().AsCpuRegister();
      if (index.IsConstant()) {
        __ movzxw(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
      } else {
        __ movzxw(out, Address(obj, index.AsX86().AsCpuRegister(), TIMES_2, data_offset));
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().AsX86().AsCpuRegister();
      if (index.IsConstant()) {
        __ movl(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
      } else {
        __ movl(out, Address(obj, index.AsX86().AsCpuRegister(), TIMES_4, data_offset));
      }
      break;
    }

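    // Longs live in a register pair on x86, so the 64-bit element is read as
    // two 32-bit loads; note the pair is therefore not read atomically.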
    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      X86ManagedRegister out = locations->Out().AsX86();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ movl(out.AsRegisterPairLow(), Address(obj, offset));
        __ movl(out.AsRegisterPairHigh(), Address(obj, offset + kX86WordSize));
      } else {
        __ movl(out.AsRegisterPairLow(),
                Address(obj, index.AsX86().AsCpuRegister(), TIMES_8, data_offset));
        __ movl(out.AsRegisterPairHigh(),
                Address(obj, index.AsX86().AsCpuRegister(), TIMES_8, data_offset + kX86WordSize));
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
  }
}

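// Object stores are routed through the pAputObject runtime entrypoint, which
// performs the component-type check and the card-table write barrier; hence
// the fixed calling-convention registers below. All other element types are
// stored inline.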
void LocationsBuilderX86::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction,
      value_type == Primitive::kPrimNot ? LocationSummary::kCall : LocationSummary::kNoCall);

  if (value_type == Primitive::kPrimNot) {
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetInAt(0, X86CpuLocation(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, X86CpuLocation(calling_convention.GetRegisterAt(1)));
    locations->SetInAt(2, X86CpuLocation(calling_convention.GetRegisterAt(2)));
  } else {
    bool is_byte_type = (value_type == Primitive::kPrimBoolean)
        || (value_type == Primitive::kPrimByte);
    // Long operations need their inputs to be different from the output. For
    // byte operations, the register allocator does not support multiple
    // inputs that die at entry when one of them is tied to a specific register.
    bool dies_at_entry = value_type != Primitive::kPrimLong && !is_byte_type;
    locations->SetInAt(0, Location::RequiresRegister(), dies_at_entry);
    locations->SetInAt(
        1, Location::RegisterOrConstant(instruction->InputAt(1)), dies_at_entry);
    if (is_byte_type) {
      // Ensure the value is in a byte register.
      locations->SetInAt(2, Location::ByteRegisterOrConstant(
          X86ManagedRegister::FromCpuRegister(EAX), instruction->InputAt(2)), dies_at_entry);
    } else {
      locations->SetInAt(2, Location::RegisterOrConstant(instruction->InputAt(2)), dies_at_entry);
    }
  }
}

void InstructionCodeGeneratorX86::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsX86().AsCpuRegister();
  Location index = locations->InAt(1);
  Location value = locations->InAt(2);
  Primitive::Type value_type = instruction->GetComponentType();

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        if (value.IsRegister()) {
          __ movb(Address(obj, offset), value.AsX86().AsByteRegister());
        } else {
          __ movb(Address(obj, offset),
                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        if (value.IsRegister()) {
          __ movb(Address(obj, index.AsX86().AsCpuRegister(), TIMES_1, data_offset),
                  value.AsX86().AsByteRegister());
        } else {
          __ movb(Address(obj, index.AsX86().AsCpuRegister(), TIMES_1, data_offset),
                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
        }
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        if (value.IsRegister()) {
          __ movw(Address(obj, offset), value.AsX86().AsCpuRegister());
        } else {
          __ movw(Address(obj, offset),
                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        if (value.IsRegister()) {
          __ movw(Address(obj, index.AsX86().AsCpuRegister(), TIMES_2, data_offset),
                  value.AsX86().AsCpuRegister());
        } else {
          __ movw(Address(obj, index.AsX86().AsCpuRegister(), TIMES_2, data_offset),
                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
        }
      }
      break;
    }

    case Primitive::kPrimInt: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        if (value.IsRegister()) {
          __ movl(Address(obj, offset), value.AsX86().AsCpuRegister());
        } else {
          __ movl(Address(obj, offset), Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        if (value.IsRegister()) {
          __ movl(Address(obj, index.AsX86().AsCpuRegister(), TIMES_4, data_offset),
                  value.AsX86().AsCpuRegister());
        } else {
          __ movl(Address(obj, index.AsX86().AsCpuRegister(), TIMES_4, data_offset),
                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
        }
      }
      break;
    }

    case Primitive::kPrimNot: {
      DCHECK(!codegen_->IsLeafMethod());
      __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAputObject)));
      codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
      break;
    }

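    // As with the load above, a long store is split into two 32-bit moves
    // (low word first), so it is not a single atomic 64-bit write.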
    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        if (value.IsRegister()) {
          __ movl(Address(obj, offset), value.AsX86().AsRegisterPairLow());
          __ movl(Address(obj, offset + kX86WordSize), value.AsX86().AsRegisterPairHigh());
        } else {
          int64_t val = value.GetConstant()->AsLongConstant()->GetValue();
          __ movl(Address(obj, offset), Immediate(Low32Bits(val)));
          __ movl(Address(obj, offset + kX86WordSize), Immediate(High32Bits(val)));
        }
      } else {
        if (value.IsRegister()) {
          __ movl(Address(obj, index.AsX86().AsCpuRegister(), TIMES_8, data_offset),
                  value.AsX86().AsRegisterPairLow());
          __ movl(Address(obj, index.AsX86().AsCpuRegister(), TIMES_8, data_offset + kX86WordSize),
                  value.AsX86().AsRegisterPairHigh());
        } else {
          int64_t val = value.GetConstant()->AsLongConstant()->GetValue();
          __ movl(Address(obj, index.AsX86().AsCpuRegister(), TIMES_8, data_offset),
                  Immediate(Low32Bits(val)));
          __ movl(Address(obj, index.AsX86().AsCpuRegister(), TIMES_8, data_offset + kX86WordSize),
                  Immediate(High32Bits(val)));
        }
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
  }
}

void LocationsBuilderX86::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister(), Location::kDiesAtEntry);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorX86::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
  Register obj = locations->InAt(0).AsX86().AsCpuRegister();
  Register out = locations->Out().AsX86().AsCpuRegister();
  __ movl(out, Address(obj, offset));
}

void LocationsBuilderX86::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

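// The bounds check relies on an unsigned comparison: with kAboveEqual, a
// negative index (e.g. -1, which is 0xFFFFFFFF unsigned) also compares
// >= length, so a single branch covers both "too large" and "negative".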
void InstructionCodeGeneratorX86::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86(
      instruction, locations->InAt(0), locations->InAt(1));
  codegen_->AddSlowPath(slow_path);

  Register index = locations->InAt(0).AsX86().AsCpuRegister();
  Register length = locations->InAt(1).AsX86().AsCpuRegister();

  __ cmpl(index, length);
  __ j(kAboveEqual, slow_path->GetEntryLabel());
}

void LocationsBuilderX86::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
}

void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorX86::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

void LocationsBuilderX86::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

void InstructionCodeGeneratorX86::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

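// Polls the current thread's flags word through the fs segment (thread-local
// storage on x86). With no successor, the check branches to the slow path and
// the slow path returns in line; with a successor, it jumps to the successor
// when no suspension is requested and to the slow path otherwise.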
void InstructionCodeGeneratorX86::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathX86* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathX86(instruction, successor);
  codegen_->AddSlowPath(slow_path);
  __ fs()->cmpw(Address::Absolute(
      Thread::ThreadFlagsOffset<kX86WordSize>().Int32Value()), Immediate(0));
  if (successor == nullptr) {
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}

X86Assembler* ParallelMoveResolverX86::GetAssembler() const {
  return codegen_->GetAssembler();
}

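// x86 has no memory-to-memory mov, so stack-to-stack moves borrow a scratch
// register. If no register was free and the scratch had to be pushed, ESP has
// moved down by one word; stack_offset compensates for that in both addresses.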
void ParallelMoveResolverX86::MoveMemoryToMemory(int dst, int src) {
  ScratchRegisterScope ensure_scratch(
      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, src + stack_offset));
  __ movl(Address(ESP, dst + stack_offset), static_cast<Register>(ensure_scratch.GetRegister()));
}

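// Emits one move of the parallel move: handles 32-bit register, stack-slot
// and int-constant sources; 64-bit and floating-point moves are not
// implemented at this point.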
void ParallelMoveResolverX86::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ movl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
    } else {
      DCHECK(destination.IsStackSlot());
      __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsCpuRegister());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ movl(destination.AsX86().AsCpuRegister(), Address(ESP, source.GetStackIndex()));
    } else {
      DCHECK(destination.IsStackSlot());
      MoveMemoryToMemory(destination.GetStackIndex(), source.GetStackIndex());
    }
  } else if (source.IsConstant()) {
    HIntConstant* constant = source.GetConstant()->AsIntConstant();
    Immediate imm(constant->GetValue());
    if (destination.IsRegister()) {
      __ movl(destination.AsX86().AsCpuRegister(), imm);
    } else {
      __ movl(Address(ESP, destination.GetStackIndex()), imm);
    }
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}

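// Swaps a register with a stack slot through a scratch register. The
// suggested scratch deliberately differs from 'reg' so the scope never hands
// back the very register being exchanged.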
void ParallelMoveResolverX86::Exchange(Register reg, int mem) {
  Register suggested_scratch = reg == EAX ? EBX : EAX;
  ScratchRegisterScope ensure_scratch(
      this, reg, suggested_scratch, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset));
  __ movl(Address(ESP, mem + stack_offset), reg);
  __ movl(reg, static_cast<Register>(ensure_scratch.GetRegister()));
}

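// Swaps two stack slots using two scratch registers; each spilled scratch
// pushes one word, so stack_offset accumulates up to 2 * kX86WordSize.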
void ParallelMoveResolverX86::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch1(
      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());

  Register suggested_scratch = ensure_scratch1.GetRegister() == EAX ? EBX : EAX;
  ScratchRegisterScope ensure_scratch2(
      this, ensure_scratch1.GetRegister(), suggested_scratch, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch1.IsSpilled() ? kX86WordSize : 0;
  stack_offset += ensure_scratch2.IsSpilled() ? kX86WordSize : 0;
  __ movl(static_cast<Register>(ensure_scratch1.GetRegister()), Address(ESP, mem1 + stack_offset));
  __ movl(static_cast<Register>(ensure_scratch2.GetRegister()), Address(ESP, mem2 + stack_offset));
  __ movl(Address(ESP, mem2 + stack_offset), static_cast<Register>(ensure_scratch1.GetRegister()));
  __ movl(Address(ESP, mem1 + stack_offset), static_cast<Register>(ensure_scratch2.GetRegister()));
}

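// Register-register swaps use a single xchgl; any pair involving memory goes
// through the Exchange helpers above, likely because xchg with a memory
// operand implies a lock prefix on x86.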
void ParallelMoveResolverX86::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    __ xchgl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange(source.AsX86().AsCpuRegister(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange(destination.AsX86().AsCpuRegister(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange(destination.GetStackIndex(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}

void ParallelMoveResolverX86::SpillScratch(int reg) {
  __ pushl(static_cast<Register>(reg));
}

void ParallelMoveResolverX86::RestoreScratch(int reg) {
  __ popl(static_cast<Register>(reg));
}

}  // namespace x86
}  // namespace art