code_generator_x86_64.cc revision 26a25ef62a13f409f941aa39825a51b4d6f0f047
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86_64.h"

#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "mirror/array.h"
#include "mirror/art_method.h"
#include "mirror/class.h"
#include "mirror/object_reference.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"
#include "utils/x86_64/assembler_x86_64.h"
#include "utils/x86_64/managed_register_x86_64.h"

namespace art {

x86_64::X86_64ManagedRegister Location::AsX86_64() const {
  return reg().AsX86_64();
}

namespace x86_64 {

static constexpr bool kExplicitStackOverflowCheck = false;

// Some x86_64 instructions require a register to be available as temp.
static constexpr Register TMP = R11;

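// Only the return address, pushed implicitly by the call instruction, is on
// the stack when a method is entered.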
static constexpr int kNumberOfPushedRegistersAtEntry = 1;
static constexpr int kCurrentMethodStackOffset = 0;

static Location X86_64CpuLocation(Register reg) {
  return Location::RegisterLocation(X86_64ManagedRegister::FromCpuRegister(reg));
}

static constexpr Register kRuntimeParameterCoreRegisters[] = { RDI, RSI, RDX };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);

class InvokeRuntimeCallingConvention : public CallingConvention<Register> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

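// Route `__` through the x86-64 assembler of the code generator handed to
// each slow path.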
#define __ reinterpret_cast<X86_64Assembler*>(codegen->GetAssembler())->

class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(HNullCheck* instruction) : instruction_(instruction) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    __ gs()->call(
        Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pThrowNullPointer), true));
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};

class StackOverflowCheckSlowPathX86_64 : public SlowPathCode {
 public:
  StackOverflowCheckSlowPathX86_64() {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    __ addq(CpuRegister(RSP),
            Immediate(codegen->GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86_64WordSize));
    __ gs()->jmp(
        Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pThrowStackOverflow), true));
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathX86_64);
};

class SuspendCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit SuspendCheckSlowPathX86_64(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    __ gs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pTestSuspend), true));
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ jmp(GetReturnLabel());
    } else {
      __ jmp(codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  HBasicBlock* const successor_;
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86_64);
};

class BoundsCheckSlowPathX86_64 : public SlowPathCode {
 public:
  BoundsCheckSlowPathX86_64(HBoundsCheck* instruction,
                            Location index_location,
                            Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorX86_64* x64_codegen = reinterpret_cast<CodeGeneratorX86_64*>(codegen);
    __ Bind(GetEntryLabel());
    InvokeRuntimeCallingConvention calling_convention;
    x64_codegen->Move(X86_64CpuLocation(calling_convention.GetRegisterAt(0)), index_location_);
    x64_codegen->Move(X86_64CpuLocation(calling_convention.GetRegisterAt(1)), length_location_);
    __ gs()->call(Address::Absolute(
        QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pThrowArrayBounds), true));
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};

#undef __
#define __ reinterpret_cast<X86_64Assembler*>(GetAssembler())->

inline Condition X86_64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return kEqual;
}

void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << X86_64ManagedRegister::FromCpuRegister(Register(reg));
}

void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << X86_64ManagedRegister::FromXmmRegister(FloatRegister(reg));
}

void CodeGeneratorX86_64::SaveCoreRegister(Location stack_location, uint32_t reg_id) {
  __ movq(Address(CpuRegister(RSP), stack_location.GetStackIndex()), CpuRegister(reg_id));
}

void CodeGeneratorX86_64::RestoreCoreRegister(Location stack_location, uint32_t reg_id) {
  __ movq(CpuRegister(reg_id), Address(CpuRegister(RSP), stack_location.GetStackIndex()));
}

CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph)
      : CodeGenerator(graph, kNumberOfRegIds),
        location_builder_(graph, this),
        instruction_visitor_(graph, this),
        move_resolver_(graph->GetArena(), this) {}

size_t CodeGeneratorX86_64::FrameEntrySpillSize() const {
  return kNumberOfPushedRegistersAtEntry * kX86_64WordSize;
}

InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}

ManagedRegister CodeGeneratorX86_64::AllocateFreeRegister(Primitive::Type type,
                                                          bool* blocked_registers) const {
  switch (type) {
    case Primitive::kPrimLong:
    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      size_t reg = AllocateFreeRegisterInternal(blocked_registers, kNumberOfCpuRegisters);
      return X86_64ManagedRegister::FromCpuRegister(static_cast<Register>(reg));
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << type;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return ManagedRegister::NoRegister();
}

void CodeGeneratorX86_64::SetupBlockedRegisters(bool* blocked_registers) const {
  // Stack register is always reserved.
  blocked_registers[RSP] = true;

  // Block the register used as TMP.
  blocked_registers[TMP] = true;

  // TODO: We currently don't use Quick's callee saved registers.
  blocked_registers[RBX] = true;
  blocked_registers[RBP] = true;
  blocked_registers[R12] = true;
  blocked_registers[R13] = true;
  blocked_registers[R14] = true;
  blocked_registers[R15] = true;
}

void CodeGeneratorX86_64::GenerateFrameEntry() {
  // Create a fake register to mimic Quick.
  static const int kFakeReturnRegister = 16;
  core_spill_mask_ |= (1 << kFakeReturnRegister);

  bool skip_overflow_check = IsLeafMethod()
      && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kX86_64);

  if (!skip_overflow_check && !kExplicitStackOverflowCheck) {
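    // Implicit stack overflow check: probe an address below the stack limit.
    // If the stack cannot accommodate the frame, the resulting fault is
    // intercepted and turned into a StackOverflowError.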
    __ testq(CpuRegister(RAX), Address(
        CpuRegister(RSP), -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86_64))));
    RecordPcInfo(nullptr, 0);
  }

  // The return PC has already been pushed on the stack.
  __ subq(CpuRegister(RSP),
          Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86_64WordSize));

  if (!skip_overflow_check && kExplicitStackOverflowCheck) {
    SlowPathCode* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathX86_64();
    AddSlowPath(slow_path);

    __ gs()->cmpq(CpuRegister(RSP),
                  Address::Absolute(Thread::StackEndOffset<kX86_64WordSize>(), true));
    __ j(kLess, slow_path->GetEntryLabel());
  }

  __ movl(Address(CpuRegister(RSP), kCurrentMethodStackOffset), CpuRegister(RDI));
}

void CodeGeneratorX86_64::GenerateFrameExit() {
  __ addq(CpuRegister(RSP),
          Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86_64WordSize));
}

void CodeGeneratorX86_64::Bind(Label* label) {
  __ Bind(label);
}

void InstructionCodeGeneratorX86_64::LoadCurrentMethod(CpuRegister reg) {
  __ movl(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
}

Location CodeGeneratorX86_64::GetStackLocation(HLoadLocal* load) const {
  switch (load->GetType()) {
    case Primitive::kPrimLong:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
      break;

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented type " << load->GetType();

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << load->GetType();
  }

  LOG(FATAL) << "Unreachable";
  return Location();
}

void CodeGeneratorX86_64::Move(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
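  // x86-64 has no memory-to-memory moves, so stack-to-stack copies go through
  // the reserved TMP register.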
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());
    } else if (source.IsStackSlot()) {
      __ movl(destination.AsX86_64().AsCpuRegister(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(destination.AsX86_64().AsCpuRegister(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsStackSlot()) {
    if (source.IsRegister()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsX86_64().AsCpuRegister());
    } else {
      DCHECK(source.IsStackSlot());
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsX86_64().AsCpuRegister());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  }
}

void CodeGeneratorX86_64::Move(HInstruction* instruction,
                               Location location,
                               HInstruction* move_for) {
  if (instruction->AsIntConstant() != nullptr) {
    Immediate imm(instruction->AsIntConstant()->GetValue());
    if (location.IsRegister()) {
      __ movl(location.AsX86_64().AsCpuRegister(), imm);
    } else {
      __ movl(Address(CpuRegister(RSP), location.GetStackIndex()), imm);
    }
  } else if (instruction->AsLongConstant() != nullptr) {
    int64_t value = instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      __ movq(location.AsX86_64().AsCpuRegister(), Immediate(value));
    } else {
      __ movq(CpuRegister(TMP), Immediate(value));
      __ movq(Address(CpuRegister(RSP), location.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (instruction->AsLoadLocal() != nullptr) {
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        Move(location, Location::StackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      case Primitive::kPrimLong:
        Move(location,
             Location::DoubleStackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      default:
        LOG(FATAL) << "Unimplemented local type " << instruction->GetType();
    }
  } else {
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        Move(location, instruction->GetLocations()->Out());
        break;

      default:
        LOG(FATAL) << "Unimplemented type " << instruction->GetType();
    }
  }
}

void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit) {
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ int3();
  }
}

void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  HCondition* condition = cond->AsCondition();
  if (condition->NeedsMaterialization()) {
    locations->SetInAt(0, Location::Any(), Location::kDiesAtEntry);
  }
}

void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  HCondition* condition = cond->AsCondition();
  if (condition->NeedsMaterialization()) {
    // Moves do not affect the eflags register, so if the condition is evaluated
    // just before the if, we don't need to evaluate it again.
    if (!condition->IsBeforeWhenDisregardMoves(if_instr)) {
      // Materialized condition, compare against 0.
      Location lhs = if_instr->GetLocations()->InAt(0);
      if (lhs.IsRegister()) {
        __ cmpl(lhs.AsX86_64().AsCpuRegister(), Immediate(0));
      } else {
        __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
      }
    }
    __ j(kNotEqual, codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
  } else {
    Location lhs = condition->GetLocations()->InAt(0);
    Location rhs = condition->GetLocations()->InAt(1);
    if (rhs.IsRegister()) {
      __ cmpl(lhs.AsX86_64().AsCpuRegister(), rhs.AsX86_64().AsCpuRegister());
    } else if (rhs.IsConstant()) {
      __ cmpl(lhs.AsX86_64().AsCpuRegister(),
              Immediate(rhs.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      __ cmpl(lhs.AsX86_64().AsCpuRegister(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
    }
    __ j(X86_64Condition(condition->GetCondition()),
         codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
  }
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
    __ jmp(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}

void LocationsBuilderX86_64::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderX86_64::VisitLoadLocal(HLoadLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
}

void LocationsBuilderX86_64::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << store->InputAt(1)->GetType();
  }
}

void InstructionCodeGeneratorX86_64::VisitStoreLocal(HStoreLocal* store) {
}

void LocationsBuilderX86_64::VisitCondition(HCondition* comp) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister(), Location::kDiesAtEntry);
  locations->SetInAt(1, Location::Any(), Location::kDiesAtEntry);
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorX86_64::VisitCondition(HCondition* comp) {
  if (comp->NeedsMaterialization()) {
    LocationSummary* locations = comp->GetLocations();
    CpuRegister reg = locations->Out().AsX86_64().AsCpuRegister();
    // Clear register: setcc only sets the low byte.
    __ xorq(reg, reg);
    if (locations->InAt(1).IsRegister()) {
      __ cmpq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
    } else if (locations->InAt(1).IsConstant()) {
      __ cmpq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              Immediate(locations->InAt(1).GetConstant()->AsIntConstant()->GetValue()));
    } else {
      __ cmpq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              Address(CpuRegister(RSP), locations->InAt(1).GetStackIndex()));
    }
    __ setcc(X86_64Condition(comp->GetCondition()), reg);
  }
}

void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister(), Location::kDiesAtEntry);
  locations->SetInAt(1, Location::RequiresRegister(), Location::kDiesAtEntry);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
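  // Materializes -1, 0 or 1 in the output register, depending on the
  // comparison result.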
  Label greater, done;
  LocationSummary* locations = compare->GetLocations();
  switch (compare->InputAt(0)->GetType()) {
    case Primitive::kPrimLong:
      __ cmpq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    default:
      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
  }

  __ movl(locations->Out().AsX86_64().AsCpuRegister(), Immediate(0));
  __ j(kEqual, &done);
  __ j(kGreater, &greater);

  __ movl(locations->Out().AsX86_64().AsCpuRegister(), Immediate(-1));
  __ jmp(&done);

  __ Bind(&greater);
  __ movl(locations->Out().AsX86_64().AsCpuRegister(), Immediate(1));

  __ Bind(&done);
}

void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant) {
}

void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant) {
}

void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret) {
  codegen_->GenerateFrameExit();
  __ ret();
}

void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetInAt(0, X86_64CpuLocation(RAX));
      break;

    default:
      LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
  }
}

void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86_64().AsCpuRegister().AsRegister(), RAX);
        break;

      default:
        LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
  __ ret();
}

Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return X86_64CpuLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimLong: {
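      // A long takes a single register but two stack slots, so the register
      // and stack indices advance at different rates.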
      uint32_t index = gp_index_;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        gp_index_ += 1;
        return X86_64CpuLocation(calling_convention.GetRegisterAt(index));
      } else {
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented parameter type " << type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}

void LocationsBuilderX86_64::VisitInvokeStatic(HInvokeStatic* invoke) {
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorX86_64::VisitInvokeStatic(HInvokeStatic* invoke) {
  CpuRegister temp = invoke->GetLocations()->GetTemp(0).AsX86_64().AsCpuRegister();
  uint32_t heap_reference_size = sizeof(mirror::HeapReference<mirror::Object>);
  size_t index_in_cache = mirror::Array::DataOffset(heap_reference_size).SizeValue() +
      invoke->GetIndexInDexCache() * heap_reference_size;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

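  // Dex cache entries are 32-bit heap references, so 32-bit loads suffice
  // below.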
  // temp = method;
  LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ movl(temp, Address(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().SizeValue()));
  // temp = temp[index_in_cache]
  __ movl(temp, Address(temp, index_in_cache));
  // (temp + offset_of_quick_compiled_code)()
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  HandleInvoke(invoke);
}

void LocationsBuilderX86_64::HandleInvoke(HInvoke* invoke) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
  locations->AddTemp(X86_64CpuLocation(RDI));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  switch (invoke->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetOut(X86_64CpuLocation(RAX));
      break;

    case Primitive::kPrimVoid:
      break;

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented return type " << invoke->GetType();
      break;
  }
}

void InstructionCodeGeneratorX86_64::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  CpuRegister temp = invoke->GetLocations()->GetTemp(0).AsX86_64().AsCpuRegister();
  size_t method_offset = mirror::Class::EmbeddedVTableOffset().SizeValue() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  size_t class_offset = mirror::Object::ClassOffset().SizeValue();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ movq(temp, Address(CpuRegister(RSP), receiver.GetStackIndex()));
    __ movq(temp, Address(temp, class_offset));
  } else {
    __ movq(temp, Address(receiver.AsX86_64().AsCpuRegister(), class_offset));
  }
  // temp = temp->GetMethodAt(method_offset);
  __ movl(temp, Address(temp, method_offset));
  // call temp->GetEntryPoint();
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().SizeValue()));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
}

void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  DCHECK_EQ(locations->InAt(0).AsX86_64().AsCpuRegister().AsRegister(),
            locations->Out().AsX86_64().AsCpuRegister().AsRegister());
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      if (locations->InAt(1).IsRegister()) {
        __ addl(locations->InAt(0).AsX86_64().AsCpuRegister(),
                locations->InAt(1).AsX86_64().AsCpuRegister());
      } else if (locations->InAt(1).IsConstant()) {
        HConstant* instruction = locations->InAt(1).GetConstant();
        Immediate imm(instruction->AsIntConstant()->GetValue());
        __ addl(locations->InAt(0).AsX86_64().AsCpuRegister(), imm);
      } else {
        __ addl(locations->InAt(0).AsX86_64().AsCpuRegister(),
                Address(CpuRegister(RSP), locations->InAt(1).GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimLong: {
      __ addq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
}

void LocationsBuilderX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
  }
}

void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  DCHECK_EQ(locations->InAt(0).AsX86_64().AsCpuRegister().AsRegister(),
            locations->Out().AsX86_64().AsCpuRegister().AsRegister());
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      if (locations->InAt(1).IsRegister()) {
        __ subl(locations->InAt(0).AsX86_64().AsCpuRegister(),
                locations->InAt(1).AsX86_64().AsCpuRegister());
      } else if (locations->InAt(1).IsConstant()) {
        HConstant* instruction = locations->InAt(1).GetConstant();
        Immediate imm(instruction->AsIntConstant()->GetValue());
        __ subl(locations->InAt(0).AsX86_64().AsCpuRegister(), imm);
      } else {
        __ subl(locations->InAt(0).AsX86_64().AsCpuRegister(),
                Address(CpuRegister(RSP), locations->InAt(1).GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimLong: {
      __ subq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
  }
}

void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(X86_64CpuLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(X86_64CpuLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(X86_64CpuLocation(RAX));
}

void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  LoadCurrentMethod(CpuRegister(calling_convention.GetRegisterAt(1)));
  __ movq(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(instruction->GetTypeIndex()));

  __ gs()->call(Address::Absolute(
      QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAllocObjectWithAccessCheck), true));

  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
}

void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
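  // Stack parameters live in the caller's frame: rebase the calling
  // convention offset past the current frame.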
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorX86_64::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
}

void LocationsBuilderX86_64::VisitNot(HNot* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
}

void InstructionCodeGeneratorX86_64::VisitNot(HNot* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  DCHECK_EQ(locations->InAt(0).AsX86_64().AsCpuRegister().AsRegister(),
            locations->Out().AsX86_64().AsCpuRegister().AsRegister());
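  // HNot only negates booleans here, so flipping bit 0 is enough.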
  __ xorq(locations->Out().AsX86_64().AsCpuRegister(), Immediate(1));
}

void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction) {
  LOG(FATAL) << "Unimplemented";
}

void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  Primitive::Type field_type = instruction->GetFieldType();
  bool is_object_type = field_type == Primitive::kPrimNot;
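  // Inputs of object stores stay live past entry: the write barrier below
  // needs them after the store itself.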
  bool dies_at_entry = !is_object_type;
  locations->SetInAt(0, Location::RequiresRegister(), dies_at_entry);
  locations->SetInAt(1, Location::RequiresRegister(), dies_at_entry);
  if (is_object_type) {
    // Temporary registers for the write barrier.
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  CpuRegister obj = locations->InAt(0).AsX86_64().AsCpuRegister();
  CpuRegister value = locations->InAt(1).AsX86_64().AsCpuRegister();
  size_t offset = instruction->GetFieldOffset().SizeValue();
  Primitive::Type field_type = instruction->GetFieldType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      __ movb(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      __ movw(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ movl(Address(obj, offset), value);
      if (field_type == Primitive::kPrimNot) {
        CpuRegister temp = locations->GetTemp(0).AsX86_64().AsCpuRegister();
        CpuRegister card = locations->GetTemp(1).AsX86_64().AsCpuRegister();
        codegen_->MarkGCCard(temp, card, obj, value);
      }
      break;
    }

    case Primitive::kPrimLong: {
      __ movq(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << field_type;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
  }
}

void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister(), Location::kDiesAtEntry);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  CpuRegister obj = locations->InAt(0).AsX86_64().AsCpuRegister();
  CpuRegister out = locations->Out().AsX86_64().AsCpuRegister();
  size_t offset = instruction->GetFieldOffset().SizeValue();

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      __ movzxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimByte: {
      __ movsxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimShort: {
      __ movsxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimChar: {
      __ movzxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ movl(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimLong: {
      __ movq(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
  }
}

void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::Any());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  if (obj.IsRegister()) {
    __ cmpl(obj.AsX86_64().AsCpuRegister(), Immediate(0));
  } else if (obj.IsStackSlot()) {
    __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
  } else {
    DCHECK(obj.IsConstant()) << obj;
    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
    __ jmp(slow_path->GetEntryLabel());
    return;
  }
  __ j(kEqual, slow_path->GetEntryLabel());
}

void LocationsBuilderX86_64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister(), Location::kDiesAtEntry);
  locations->SetInAt(
      1, Location::RegisterOrConstant(instruction->InputAt(1)), Location::kDiesAtEntry);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorX86_64::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  CpuRegister obj = locations->InAt(0).AsX86_64().AsCpuRegister();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      CpuRegister out = locations->Out().AsX86_64().AsCpuRegister();
      if (index.IsConstant()) {
        __ movzxb(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
      } else {
        __ movzxb(out, Address(obj, index.AsX86_64().AsCpuRegister(), TIMES_1, data_offset));
      }
      break;
    }

    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      CpuRegister out = locations->Out().AsX86_64().AsCpuRegister();
      if (index.IsConstant()) {
        __ movsxb(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset));
      } else {
        __ movsxb(out, Address(obj, index.AsX86_64().AsCpuRegister(), TIMES_1, data_offset));
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      CpuRegister out = locations->Out().AsX86_64().AsCpuRegister();
      if (index.IsConstant()) {
        __ movsxw(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
      } else {
        __ movsxw(out, Address(obj, index.AsX86_64().AsCpuRegister(), TIMES_2, data_offset));
      }
      break;
    }

    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      CpuRegister out = locations->Out().AsX86_64().AsCpuRegister();
      if (index.IsConstant()) {
        __ movzxw(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset));
      } else {
        __ movzxw(out, Address(obj, index.AsX86_64().AsCpuRegister(), TIMES_2, data_offset));
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      CpuRegister out = locations->Out().AsX86_64().AsCpuRegister();
      if (index.IsConstant()) {
        __ movl(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset));
      } else {
        __ movl(out, Address(obj, index.AsX86_64().AsCpuRegister(), TIMES_4, data_offset));
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      CpuRegister out = locations->Out().AsX86_64().AsCpuRegister();
      if (index.IsConstant()) {
        __ movq(out, Address(obj,
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset));
      } else {
        __ movq(out, Address(obj, index.AsX86_64().AsCpuRegister(), TIMES_8, data_offset));
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
  }
}

void LocationsBuilderX86_64::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();
  bool is_object = value_type == Primitive::kPrimNot;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction, is_object ? LocationSummary::kCall : LocationSummary::kNoCall);
  if (is_object) {
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetInAt(0, X86_64CpuLocation(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, X86_64CpuLocation(calling_convention.GetRegisterAt(1)));
    locations->SetInAt(2, X86_64CpuLocation(calling_convention.GetRegisterAt(2)));
  } else {
    locations->SetInAt(0, Location::RequiresRegister(), Location::kDiesAtEntry);
    locations->SetInAt(
        1, Location::RegisterOrConstant(instruction->InputAt(1)), Location::kDiesAtEntry);
    if (value_type == Primitive::kPrimLong) {
      locations->SetInAt(2, Location::RequiresRegister(), Location::kDiesAtEntry);
    } else {
      locations->SetInAt(
          2, Location::RegisterOrConstant(instruction->InputAt(2)), Location::kDiesAtEntry);
    }
  }
}

void InstructionCodeGeneratorX86_64::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  CpuRegister obj = locations->InAt(0).AsX86_64().AsCpuRegister();
  Location index = locations->InAt(1);
  Location value = locations->InAt(2);
  Primitive::Type value_type = instruction->GetComponentType();

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        if (value.IsRegister()) {
          __ movb(Address(obj, offset), value.AsX86_64().AsCpuRegister());
        } else {
          __ movb(Address(obj, offset),
                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        if (value.IsRegister()) {
          __ movb(Address(obj, index.AsX86_64().AsCpuRegister(), TIMES_1, data_offset),
                  value.AsX86_64().AsCpuRegister());
        } else {
          __ movb(Address(obj, index.AsX86_64().AsCpuRegister(), TIMES_1, data_offset),
                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
        }
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        if (value.IsRegister()) {
          __ movw(Address(obj, offset), value.AsX86_64().AsCpuRegister());
        } else {
          __ movw(Address(obj, offset),
                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        if (value.IsRegister()) {
          __ movw(Address(obj, index.AsX86_64().AsCpuRegister(), TIMES_2, data_offset),
                  value.AsX86_64().AsCpuRegister());
        } else {
          __ movw(Address(obj, index.AsX86_64().AsCpuRegister(), TIMES_2, data_offset),
                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
        }
      }
      break;
    }

    case Primitive::kPrimInt: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        if (value.IsRegister()) {
          __ movl(Address(obj, offset), value.AsX86_64().AsCpuRegister());
        } else {
          __ movl(Address(obj, offset),
                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
        }
      } else {
        if (value.IsRegister()) {
          __ movl(Address(obj, index.AsX86_64().AsCpuRegister(), TIMES_4, data_offset),
                  value.AsX86_64().AsCpuRegister());
        } else {
          __ movl(Address(obj, index.AsX86_64().AsCpuRegister(), TIMES_4, data_offset),
                  Immediate(value.GetConstant()->AsIntConstant()->GetValue()));
        }
      }
      break;
    }

    case Primitive::kPrimNot: {
      __ gs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAputObject), true));
      DCHECK(!codegen_->IsLeafMethod());
      codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        DCHECK(value.IsRegister());
        __ movq(Address(obj, offset), value.AsX86_64().AsCpuRegister());
      } else {
        DCHECK(value.IsRegister());
        __ movq(Address(obj, index.AsX86_64().AsCpuRegister(), TIMES_8, data_offset),
                value.AsX86_64().AsCpuRegister());
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
  }
}

void LocationsBuilderX86_64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister(), Location::kDiesAtEntry);
  locations->SetOut(Location::RequiresRegister());
}

void InstructionCodeGeneratorX86_64::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
  CpuRegister obj = locations->InAt(0).AsX86_64().AsCpuRegister();
  CpuRegister out = locations->Out().AsX86_64().AsCpuRegister();
  __ movl(out, Address(obj, offset));
}

void LocationsBuilderX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorX86_64::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  SlowPathCode* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathX86_64(
      instruction, locations->InAt(0), locations->InAt(1));
  codegen_->AddSlowPath(slow_path);

  CpuRegister index = locations->InAt(0).AsX86_64().AsCpuRegister();
  CpuRegister length = locations->InAt(1).AsX86_64().AsCpuRegister();

1435  __ cmpl(index, length);
1436  __ j(kAboveEqual, slow_path->GetEntryLabel());
1437}
1438
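// Card marking for the GC write barrier. Null stores dirty no card. The card
// index is `object >> kCardShift`; the card table base kept in thread-local
// storage is biased so that its own low byte equals the dirty-card value,
// which is why the base register doubles as the byte that gets stored.
// Sketch of the emitted sequence:
//   testl value, value            // skip the barrier entirely for null
//   je    is_null
//   movq  card, gs:[card_table]   // biased card table base
//   movq  temp, object
//   shrq  temp, kCardShift
//   movb  [temp + card], cardb    // low byte of the base == kCardDirty
// is_null: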
void CodeGeneratorX86_64::MarkGCCard(CpuRegister temp,
                                     CpuRegister card,
                                     CpuRegister object,
                                     CpuRegister value) {
  Label is_null;
  __ testl(value, value);
  __ j(kEqual, &is_null);
  __ gs()->movq(card, Address::Absolute(
      Thread::CardTableOffset<kX86_64WordSize>().Int32Value(), true));
  __ movq(temp, object);
  __ shrq(temp, Immediate(gc::accounting::CardTable::kCardShift));
  __ movb(Address(temp, card, TIMES_1, 0), card);
  __ Bind(&is_null);
}

void LocationsBuilderX86_64::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
}

void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction) {
  // Parallel moves are inserted after the locations builder has run, so this
  // visitor should never be called.
  LOG(FATAL) << "Unimplemented";
}

void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

void LocationsBuilderX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

void InstructionCodeGeneratorX86_64::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

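// Polls the 16-bit thread flags in thread-local storage. Two shapes are
// emitted:
//   - successor == nullptr: execution resumes at the point of the check, so
//     the slow path jumps back to a return label:
//       cmpw gs:[thread_flags], 0
//       jne  slow_path
//     return_label:
//   - successor != nullptr (a back edge): branch straight to the successor
//     when no flag is set, otherwise jump into the slow path, which transfers
//     control to the successor after the runtime call.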
void InstructionCodeGeneratorX86_64::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                          HBasicBlock* successor) {
  SuspendCheckSlowPathX86_64* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathX86_64(instruction, successor);
  codegen_->AddSlowPath(slow_path);
  __ gs()->cmpw(Address::Absolute(
      Thread::ThreadFlagsOffset<kX86_64WordSize>().Int32Value(), true), Immediate(0));
  if (successor == nullptr) {
    __ j(kNotEqual, slow_path->GetEntryLabel());
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ j(kEqual, codegen_->GetLabelOf(successor));
    __ jmp(slow_path->GetEntryLabel());
  }
}

X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
  return codegen_->GetAssembler();
}

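// Emits one resolved move. Register-to-register and register-to-stack moves
// need no scratch; stack-to-stack and wide-constant-to-stack moves stage the
// value through TMP (R11), which the code generator reserves as scratch and
// therefore can never be either operand.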
void ParallelMoveResolverX86_64::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ movq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());
    } else if (destination.IsStackSlot()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsX86_64().AsCpuRegister());
    } else {
      DCHECK(destination.IsDoubleStackSlot());
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsX86_64().AsCpuRegister());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ movl(destination.AsX86_64().AsCpuRegister(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsStackSlot());
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsDoubleStackSlot()) {
    if (destination.IsRegister()) {
      __ movq(destination.AsX86_64().AsCpuRegister(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsDoubleStackSlot());
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsConstant()) {
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant()) {
      Immediate imm(constant->AsIntConstant()->GetValue());
      if (destination.IsRegister()) {
        __ movl(destination.AsX86_64().AsCpuRegister(), imm);
      } else {
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegister()) {
        __ movq(destination.AsX86_64().AsCpuRegister(), Immediate(value));
      } else {
        __ movq(CpuRegister(TMP), Immediate(value));
        __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
      }
    } else {
      LOG(FATAL) << "Unimplemented constant type";
    }
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}

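// Swap helpers. A register<->memory swap cycles both values through TMP:
//   TMP <- [RSP + mem]; [RSP + mem] <- reg; reg <- TMP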
void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movl(Address(CpuRegister(RSP), mem), reg);
  __ movl(reg, CpuRegister(TMP));
}

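// Memory<->memory swaps need a second scratch register besides TMP. If the
// ScratchRegisterScope has to spill one to obtain it (via a push), RSP has
// moved down by one word, so the incoming RSP-relative offsets are rebased by
// stack_offset to keep addressing the same slots.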
void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movl(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}

void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movq(Address(CpuRegister(RSP), mem), reg);
  __ movq(reg, CpuRegister(TMP));
}

void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movq(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}

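// Register-register swaps use a single xchgq; everything touching memory is
// dispatched to the Exchange32/Exchange64 helpers above. Exchanging the full
// 64-bit registers is safe for 32-bit values as well, but the 32-bit stack
// cases must use 32-bit moves so that neighboring slots are left untouched.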
void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    __ xchgq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsX86_64().AsCpuRegister(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange32(destination.AsX86_64().AsCpuRegister(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange32(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsX86_64().AsCpuRegister(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
    Exchange64(destination.AsX86_64().AsCpuRegister(), source.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    Exchange64(destination.GetStackIndex(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}

void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}

void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}

}  // namespace x86_64
}  // namespace art