code_generator_x86_64.cc revision 96f89a290eb67d7bf4b1636798fa28df14309cc7
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_x86_64.h"

#include "entrypoints/quick/quick_entrypoints.h"
#include "mirror/array.h"
#include "mirror/art_method.h"
#include "mirror/object_reference.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/x86_64/assembler_x86_64.h"
#include "utils/x86_64/managed_register_x86_64.h"

namespace art {

x86_64::X86_64ManagedRegister Location::AsX86_64() const {
  return reg().AsX86_64();
}

namespace x86_64 {

#define __ reinterpret_cast<X86_64Assembler*>(codegen->GetAssembler())->

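// Slow paths are out-of-line sequences, emitted after the method's main body,
// that handle the uncommon case (here: a null receiver). The fast path jumps
// to the slow path's entry label; the slow path then calls into the runtime
// through the thread-local entrypoint table, addressed via the %gs segment.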
class NullCheckSlowPathX86_64 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86_64(uint32_t dex_pc) : dex_pc_(dex_pc) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    __ gs()->call(Address::Absolute(
        QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pThrowNullPointer), true));
    codegen->RecordPcInfo(dex_pc_);
  }

 private:
  const uint32_t dex_pc_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86_64);
};

#undef __
#define __ reinterpret_cast<X86_64Assembler*>(GetAssembler())->

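// Maps an HIR comparison to the x86-64 condition code consumed by jcc/setcc.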
inline Condition X86_64Condition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return kEqual;
    case kCondNE: return kNotEqual;
    case kCondLT: return kLess;
    case kCondLE: return kLessEqual;
    case kCondGT: return kGreater;
    case kCondGE: return kGreaterEqual;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return kEqual;
}

// Some x86_64 instructions require a register to be available as temp.
static constexpr Register TMP = R11;

static constexpr int kNumberOfPushedRegistersAtEntry = 1;
static constexpr int kCurrentMethodStackOffset = 0;

void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << X86_64ManagedRegister::FromCpuRegister(Register(reg));
}

void CodeGeneratorX86_64::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << X86_64ManagedRegister::FromXmmRegister(FloatRegister(reg));
}

static Location X86_64CpuLocation(Register reg) {
  return Location::RegisterLocation(X86_64ManagedRegister::FromCpuRegister(reg));
}

CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph)
      : CodeGenerator(graph, kNumberOfRegIds),
        location_builder_(graph, this),
        instruction_visitor_(graph, this),
        move_resolver_(graph->GetArena(), this) {}

size_t CodeGeneratorX86_64::FrameEntrySpillSize() const {
  return kNumberOfPushedRegistersAtEntry * kX86_64WordSize;
}

InstructionCodeGeneratorX86_64::InstructionCodeGeneratorX86_64(HGraph* graph,
                                                               CodeGeneratorX86_64* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}

ManagedRegister CodeGeneratorX86_64::AllocateFreeRegister(Primitive::Type type,
                                                          bool* blocked_registers) const {
  switch (type) {
    case Primitive::kPrimLong:
    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      size_t reg = AllocateFreeRegisterInternal(blocked_registers, kNumberOfCpuRegisters);
      return X86_64ManagedRegister::FromCpuRegister(static_cast<Register>(reg));
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << type;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return ManagedRegister::NoRegister();
}

void CodeGeneratorX86_64::SetupBlockedRegisters(bool* blocked_registers) const {
  // Stack register is always reserved.
  blocked_registers[RSP] = true;

  // Block the register used as TMP.
  blocked_registers[TMP] = true;

  // TODO: We currently don't use Quick's callee saved registers.
  blocked_registers[RBX] = true;
  blocked_registers[RBP] = true;
  blocked_registers[R12] = true;
  blocked_registers[R13] = true;
  blocked_registers[R14] = true;
  blocked_registers[R15] = true;
}

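// Frame layout after the entry sequence (stack grows downwards):
//   [RSP + frame_size - 8]  return PC, pushed by the caller's call
//   ...                     spill area / dex locals
//   [RSP + 0]               current ArtMethod*, stored below
// GetFrameSize() includes the already-pushed return PC, hence the
// adjustment by kNumberOfPushedRegistersAtEntry in the subq.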
void CodeGeneratorX86_64::GenerateFrameEntry() {
  // Create a fake register to mimic Quick.
  static const int kFakeReturnRegister = 16;
  core_spill_mask_ |= (1 << kFakeReturnRegister);

  // The return PC has already been pushed on the stack.
  __ subq(CpuRegister(RSP),
          Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86_64WordSize));
  __ movl(Address(CpuRegister(RSP), kCurrentMethodStackOffset), CpuRegister(RDI));
}

void CodeGeneratorX86_64::GenerateFrameExit() {
  __ addq(CpuRegister(RSP),
          Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86_64WordSize));
}

void CodeGeneratorX86_64::Bind(Label* label) {
  __ Bind(label);
}

void InstructionCodeGeneratorX86_64::LoadCurrentMethod(CpuRegister reg) {
  __ movl(reg, Address(CpuRegister(RSP), kCurrentMethodStackOffset));
}

Location CodeGeneratorX86_64::GetStackLocation(HLoadLocal* load) const {
  switch (load->GetType()) {
    case Primitive::kPrimLong:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented type " << load->GetType();

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << load->GetType();
  }

  LOG(FATAL) << "Unreachable";
  return Location();
}

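// Moves a value between two arbitrary locations. Register-to-register copies
// always use a 64-bit move; stack accesses use movl or movq depending on the
// slot size. x86-64 has no memory-to-memory mov, so stack-to-stack moves go
// through the reserved TMP register.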
void CodeGeneratorX86_64::Move(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());
    } else if (source.IsStackSlot()) {
      __ movl(destination.AsX86_64().AsCpuRegister(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(destination.AsX86_64().AsCpuRegister(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    }
  } else if (destination.IsStackSlot()) {
    if (source.IsRegister()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsX86_64().AsCpuRegister());
    } else {
      DCHECK(source.IsStackSlot());
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegister()) {
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsX86_64().AsCpuRegister());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  }
}

void CodeGeneratorX86_64::Move(HInstruction* instruction,
                               Location location,
                               HInstruction* move_for) {
  if (instruction->AsIntConstant() != nullptr) {
    Immediate imm(instruction->AsIntConstant()->GetValue());
    if (location.IsRegister()) {
      __ movl(location.AsX86_64().AsCpuRegister(), imm);
    } else {
      __ movl(Address(CpuRegister(RSP), location.GetStackIndex()), imm);
    }
  } else if (instruction->AsLongConstant() != nullptr) {
    int64_t value = instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      __ movq(location.AsX86_64().AsCpuRegister(), Immediate(value));
    } else {
      __ movq(CpuRegister(TMP), Immediate(value));
      __ movq(Address(CpuRegister(RSP), location.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (instruction->AsLoadLocal() != nullptr) {
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        Move(location, Location::StackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      case Primitive::kPrimLong:
        Move(location,
             Location::DoubleStackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      default:
        LOG(FATAL) << "Unimplemented local type " << instruction->GetType();
    }
  } else {
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        Move(location, instruction->GetLocations()->Out());
        break;

      default:
        LOG(FATAL) << "Unimplemented type " << instruction->GetType();
    }
  }
}

void LocationsBuilderX86_64::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  if (GetGraph()->GetExitBlock() == successor) {
    codegen_->GenerateFrameExit();
  } else if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ jmp(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderX86_64::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitExit(HExit* exit) {
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ int3();
  }
}

void LocationsBuilderX86_64::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  HCondition* condition = cond->AsCondition();
  if (condition->NeedsMaterialization()) {
    locations->SetInAt(0, Location::Any());
  }
  if_instr->SetLocations(locations);
}

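// Two strategies: if the condition was materialized into a register (or stack
// slot), test that boolean against 0; otherwise the comparison is generated
// here, fused with the branch, and the condition instruction itself emits no
// code of its own.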
void InstructionCodeGeneratorX86_64::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  HCondition* condition = cond->AsCondition();
  if (condition->NeedsMaterialization()) {
    // Materialized condition, compare against 0.
    Location lhs = if_instr->GetLocations()->InAt(0);
    if (lhs.IsRegister()) {
      __ cmpl(lhs.AsX86_64().AsCpuRegister(), Immediate(0));
    } else {
      __ cmpl(Address(CpuRegister(RSP), lhs.GetStackIndex()), Immediate(0));
    }
    __ j(kNotEqual, codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
  } else {
    Location lhs = condition->GetLocations()->InAt(0);
    Location rhs = condition->GetLocations()->InAt(1);
    if (rhs.IsRegister()) {
      __ cmpl(lhs.AsX86_64().AsCpuRegister(), rhs.AsX86_64().AsCpuRegister());
    } else if (rhs.IsConstant()) {
      __ cmpl(lhs.AsX86_64().AsCpuRegister(),
              Immediate(rhs.GetConstant()->AsIntConstant()->GetValue()));
    } else {
      __ cmpl(lhs.AsX86_64().AsCpuRegister(), Address(CpuRegister(RSP), rhs.GetStackIndex()));
    }
    __ j(X86_64Condition(condition->GetCondition()),
         codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
  }
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
    __ jmp(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}

void LocationsBuilderX86_64::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderX86_64::VisitLoadLocal(HLoadLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
}

void LocationsBuilderX86_64::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unimplemented local type " << store->InputAt(1)->GetType();
  }
  store->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitStoreLocal(HStoreLocal* store) {
}

void LocationsBuilderX86_64::VisitCondition(HCondition* comp) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(comp);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::Any());
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister());
  }
  comp->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitCondition(HCondition* comp) {
  if (comp->NeedsMaterialization()) {
    LocationSummary* locations = comp->GetLocations();
    if (locations->InAt(1).IsRegister()) {
      __ cmpl(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
    } else if (locations->InAt(1).IsConstant()) {
      __ cmpl(locations->InAt(0).AsX86_64().AsCpuRegister(),
              Immediate(locations->InAt(1).GetConstant()->AsIntConstant()->GetValue()));
    } else {
      __ cmpl(locations->InAt(0).AsX86_64().AsCpuRegister(),
              Address(CpuRegister(RSP), locations->InAt(1).GetStackIndex()));
    }
    __ setcc(X86_64Condition(comp->GetCondition()),
             comp->GetLocations()->Out().AsX86_64().AsCpuRegister());
  }
}

void LocationsBuilderX86_64::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86_64::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86_64::VisitCompare(HCompare* compare) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
  compare->SetLocations(locations);
}

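// HCompare implements the dex cmp-long semantics: the result is 0 when the
// inputs are equal, 1 when the first is greater, and -1 when it is less.
// Writing the 0 before the branches is safe because mov does not touch the
// flags set by cmpq.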
void InstructionCodeGeneratorX86_64::VisitCompare(HCompare* compare) {
  Label greater, done;
  LocationSummary* locations = compare->GetLocations();
  switch (compare->InputAt(0)->GetType()) {
    case Primitive::kPrimLong:
      __ cmpq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    default:
      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
  }

  __ movl(locations->Out().AsX86_64().AsCpuRegister(), Immediate(0));
  __ j(kEqual, &done);
  __ j(kGreater, &greater);

  __ movl(locations->Out().AsX86_64().AsCpuRegister(), Immediate(-1));
  __ jmp(&done);

  __ Bind(&greater);
  __ movl(locations->Out().AsX86_64().AsCpuRegister(), Immediate(1));

  __ Bind(&done);
}

void LocationsBuilderX86_64::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
  constant->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitIntConstant(HIntConstant* constant) {
}

void LocationsBuilderX86_64::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
  constant->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitLongConstant(HLongConstant* constant) {
}

void LocationsBuilderX86_64::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitReturnVoid(HReturnVoid* ret) {
  codegen_->GenerateFrameExit();
  __ ret();
}

void LocationsBuilderX86_64::VisitReturn(HReturn* ret) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetInAt(0, X86_64CpuLocation(RAX));
      break;

    default:
      LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
  }
  ret->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86_64().AsCpuRegister().AsRegister(), RAX);
        break;

      default:
        LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
  __ ret();
}

static constexpr Register kRuntimeParameterCoreRegisters[] = { RDI, RSI, RDX };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);

class InvokeRuntimeCallingConvention : public CallingConvention<Register> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

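// Walks the managed (dex) calling convention: each integer argument takes the
// next free parameter register, or a stack slot once registers run out. Longs
// occupy a single register but two 32-bit stack slots, mirroring their two
// vreg positions in the dex convention.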
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return X86_64CpuLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 1));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      stack_index_ += 2;
      if (index < calling_convention.GetNumberOfRegisters()) {
        gp_index_ += 1;
        return X86_64CpuLocation(calling_convention.GetRegisterAt(index));
      } else {
        gp_index_ += 2;
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index_ - 2));
      }
    }

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented parameter type " << type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}

void LocationsBuilderX86_64::VisitInvokeStatic(HInvokeStatic* invoke) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(invoke);
  locations->AddTemp(X86_64CpuLocation(RDI));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); ++i) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  switch (invoke->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimLong:
      locations->SetOut(X86_64CpuLocation(RAX));
      break;

    case Primitive::kPrimVoid:
      break;

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented return type " << invoke->GetType();
      break;
  }

  invoke->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitInvokeStatic(HInvokeStatic* invoke) {
  CpuRegister temp = invoke->GetLocations()->GetTemp(0).AsX86_64().AsCpuRegister();
  uint32_t heap_reference_size = sizeof(mirror::HeapReference<mirror::Object>);
  size_t index_in_cache = mirror::Array::DataOffset(heap_reference_size).SizeValue() +
      invoke->GetIndexInDexCache() * heap_reference_size;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ movl(temp, Address(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().SizeValue()));
  // temp = temp[index_in_cache]
  __ movl(temp, Address(temp, index_in_cache));
  // (temp + offset_of_quick_compiled_code)()
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().SizeValue()));

  codegen_->RecordPcInfo(invoke->GetDexPc());
}

void LocationsBuilderX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(add);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
  add->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  DCHECK_EQ(locations->InAt(0).AsX86_64().AsCpuRegister().AsRegister(),
            locations->Out().AsX86_64().AsCpuRegister().AsRegister());
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      if (locations->InAt(1).IsRegister()) {
        __ addl(locations->InAt(0).AsX86_64().AsCpuRegister(),
                locations->InAt(1).AsX86_64().AsCpuRegister());
      } else if (locations->InAt(1).IsConstant()) {
        HConstant* instruction = locations->InAt(1).GetConstant();
        Immediate imm(instruction->AsIntConstant()->GetValue());
        __ addl(locations->InAt(0).AsX86_64().AsCpuRegister(), imm);
      } else {
        __ addl(locations->InAt(0).AsX86_64().AsCpuRegister(),
                Address(CpuRegister(RSP), locations->InAt(1).GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimLong: {
      __ addq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
}

void LocationsBuilderX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(sub);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::Any());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::SameAsFirstInput());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
  }
  sub->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  DCHECK_EQ(locations->InAt(0).AsX86_64().AsCpuRegister().AsRegister(),
            locations->Out().AsX86_64().AsCpuRegister().AsRegister());
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      if (locations->InAt(1).IsRegister()) {
        __ subl(locations->InAt(0).AsX86_64().AsCpuRegister(),
                locations->InAt(1).AsX86_64().AsCpuRegister());
      } else if (locations->InAt(1).IsConstant()) {
        HConstant* instruction = locations->InAt(1).GetConstant();
        Immediate imm(instruction->AsIntConstant()->GetValue());
        __ subl(locations->InAt(0).AsX86_64().AsCpuRegister(), imm);
      } else {
        __ subl(locations->InAt(0).AsX86_64().AsCpuRegister(),
                Address(CpuRegister(RSP), locations->InAt(1).GetStackIndex()));
      }
      break;
    }
    case Primitive::kPrimLong: {
      __ subq(locations->InAt(0).AsX86_64().AsCpuRegister(),
              locations->InAt(1).AsX86_64().AsCpuRegister());
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
  }
}

void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetOut(X86_64CpuLocation(RAX));
  instruction->SetLocations(locations);
}

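// Object allocation is not inlined: the type index and the calling method are
// loaded into the runtime calling-convention registers and the allocation
// entrypoint (which, per its name, also performs the access check) is invoked
// through %gs.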
void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  LoadCurrentMethod(CpuRegister(calling_convention.GetRegisterAt(1)));
  __ movq(CpuRegister(calling_convention.GetRegisterAt(0)),
          Immediate(instruction->GetTypeIndex()));

  __ gs()->call(Address::Absolute(
      QUICK_ENTRYPOINT_OFFSET(kX86_64WordSize, pAllocObjectWithAccessCheck), true));

  codegen_->RecordPcInfo(instruction->GetDexPc());
}

void LocationsBuilderX86_64::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
}

void LocationsBuilderX86_64::VisitNot(HNot* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitNot(HNot* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  DCHECK_EQ(locations->InAt(0).AsX86_64().AsCpuRegister().AsRegister(),
            locations->Out().AsX86_64().AsCpuRegister().AsRegister());
  __ xorq(locations->Out().AsX86_64().AsCpuRegister(), Immediate(1));
}

void LocationsBuilderX86_64::VisitPhi(HPhi* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitPhi(HPhi* instruction) {
  LOG(FATAL) << "Unimplemented";
}

void LocationsBuilderX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  CpuRegister obj = locations->InAt(0).AsX86_64().AsCpuRegister();
  CpuRegister value = locations->InAt(1).AsX86_64().AsCpuRegister();
  size_t offset = instruction->GetFieldOffset().SizeValue();
  Primitive::Type field_type = instruction->InputAt(1)->GetType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      __ movb(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      __ movw(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ movl(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimLong: {
      __ movq(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << field_type;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
  }
}

void LocationsBuilderX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  CpuRegister obj = locations->InAt(0).AsX86_64().AsCpuRegister();
  CpuRegister out = locations->Out().AsX86_64().AsCpuRegister();
  size_t offset = instruction->GetFieldOffset().SizeValue();

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      __ movzxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimByte: {
      __ movsxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimShort: {
      __ movsxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimChar: {
      __ movzxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ movl(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimLong: {
      __ movq(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
  }
}

void LocationsBuilderX86_64::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::Any());
  // TODO: Have a normalization phase that makes this instruction never used.
  locations->SetOut(Location::SameAsFirstInput());
  instruction->SetLocations(locations);
}

void InstructionCodeGeneratorX86_64::VisitNullCheck(HNullCheck* instruction) {
  SlowPathCode* slow_path =
      new (GetGraph()->GetArena()) NullCheckSlowPathX86_64(instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);
  DCHECK(obj.Equals(locations->Out()));

  if (obj.IsRegister()) {
    __ cmpl(obj.AsX86_64().AsCpuRegister(), Immediate(0));
  } else {
    DCHECK(locations->InAt(0).IsStackSlot());
    __ cmpl(Address(CpuRegister(RSP), obj.GetStackIndex()), Immediate(0));
  }
  __ j(kEqual, slow_path->GetEntryLabel());
}

void LocationsBuilderX86_64::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86_64::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
}

void LocationsBuilderX86_64::VisitParallelMove(HParallelMove* instruction) {
  LOG(FATAL) << "Unimplemented";
}

void InstructionCodeGeneratorX86_64::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

X86_64Assembler* ParallelMoveResolverX86_64::GetAssembler() const {
  return codegen_->GetAssembler();
}

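// Emits one move of a resolved parallel move. The shared resolver has already
// ordered the moves so that a plain copy is safe at this point; moves that
// form a cycle are broken separately through EmitSwap.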
void ParallelMoveResolverX86_64::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ movq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());
    } else if (destination.IsStackSlot()) {
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsX86_64().AsCpuRegister());
    } else {
      DCHECK(destination.IsDoubleStackSlot());
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()),
              source.AsX86_64().AsCpuRegister());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ movl(destination.AsX86_64().AsCpuRegister(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsStackSlot());
      __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsDoubleStackSlot()) {
    if (destination.IsRegister()) {
      __ movq(destination.AsX86_64().AsCpuRegister(),
              Address(CpuRegister(RSP), source.GetStackIndex()));
    } else {
      DCHECK(destination.IsDoubleStackSlot());
      __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), source.GetStackIndex()));
      __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
    }
  } else if (source.IsConstant()) {
    HConstant* constant = source.GetConstant();
    if (constant->IsIntConstant()) {
      Immediate imm(constant->AsIntConstant()->GetValue());
      if (destination.IsRegister()) {
        __ movl(destination.AsX86_64().AsCpuRegister(), imm);
      } else {
        __ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), imm);
      }
    } else if (constant->IsLongConstant()) {
      int64_t value = constant->AsLongConstant()->GetValue();
      if (destination.IsRegister()) {
        __ movq(destination.AsX86_64().AsCpuRegister(), Immediate(value));
      } else {
        __ movq(CpuRegister(TMP), Immediate(value));
        __ movq(Address(CpuRegister(RSP), destination.GetStackIndex()), CpuRegister(TMP));
      }
    } else {
      LOG(FATAL) << "Unimplemented constant type";
    }
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}

void ParallelMoveResolverX86_64::Exchange32(CpuRegister reg, int mem) {
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movl(Address(CpuRegister(RSP), mem), reg);
  __ movl(reg, CpuRegister(TMP));
}

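// Swapping two stack slots needs a second scratch register besides TMP.
// ScratchRegisterScope may have to spill one (RAX is the suggested choice);
// when it does, the push shifts RSP, so the incoming stack offsets are
// rebased by one word.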
void ParallelMoveResolverX86_64::Exchange32(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movl(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movl(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movl(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movl(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}

void ParallelMoveResolverX86_64::Exchange64(CpuRegister reg, int mem) {
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem));
  __ movq(Address(CpuRegister(RSP), mem), reg);
  __ movq(reg, CpuRegister(TMP));
}

void ParallelMoveResolverX86_64::Exchange64(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(
      this, TMP, RAX, codegen_->GetNumberOfCoreRegisters());

  int stack_offset = ensure_scratch.IsSpilled() ? kX86_64WordSize : 0;
  __ movq(CpuRegister(TMP), Address(CpuRegister(RSP), mem1 + stack_offset));
  __ movq(CpuRegister(ensure_scratch.GetRegister()),
          Address(CpuRegister(RSP), mem2 + stack_offset));
  __ movq(Address(CpuRegister(RSP), mem2 + stack_offset), CpuRegister(TMP));
  __ movq(Address(CpuRegister(RSP), mem1 + stack_offset),
          CpuRegister(ensure_scratch.GetRegister()));
}

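// Swaps the two operands of a move that participates in a cycle. The
// register<->register case uses xchgq directly; the mixed and memory-only
// cases are routed through the Exchange helpers above.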
void ParallelMoveResolverX86_64::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    __ xchgq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange32(source.AsX86_64().AsCpuRegister(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange32(destination.AsX86_64().AsCpuRegister(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange32(destination.GetStackIndex(), source.GetStackIndex());
  } else if (source.IsRegister() && destination.IsDoubleStackSlot()) {
    Exchange64(source.AsX86_64().AsCpuRegister(), destination.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsRegister()) {
    Exchange64(destination.AsX86_64().AsCpuRegister(), source.GetStackIndex());
  } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) {
    Exchange64(destination.GetStackIndex(), source.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}


void ParallelMoveResolverX86_64::SpillScratch(int reg) {
  __ pushq(CpuRegister(reg));
}


void ParallelMoveResolverX86_64::RestoreScratch(int reg) {
  __ popq(CpuRegister(reg));
}

}  // namespace x86_64
}  // namespace art