code_generator_x86.cc revision 397f2e42beadb77d98e550bd1b25b9b61237c943
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_x86.h"
18#include "gc/accounting/card_table.h"
19#include "utils/assembler.h"
20#include "utils/stack_checks.h"
21#include "utils/x86/assembler_x86.h"
22#include "utils/x86/managed_register_x86.h"
23
24#include "entrypoints/quick/quick_entrypoints.h"
25#include "mirror/array.h"
26#include "mirror/art_method.h"
27#include "thread.h"
28
29namespace art {
30
// Convenience accessor: views this location's register as an x86 managed register.
x86::X86ManagedRegister Location::AsX86() const {
  return reg().AsX86();
}
34
35namespace x86 {
36
37static constexpr bool kExplicitStackOverflowCheck = false;
38
39static constexpr int kNumberOfPushedRegistersAtEntry = 1;
40static constexpr int kCurrentMethodStackOffset = 0;
41
42#define __ reinterpret_cast<X86Assembler*>(codegen->GetAssembler())->
43
// Slow path for a null check: calls the runtime's pThrowNullPointer entry
// point (reached through the fs: segment, which appears to hold thread-local
// state — see the Thread::StackEndOffset use below) and records the dex pc
// so the runtime can map the call site back to dex code.
class NullCheckSlowPathX86 : public SlowPathCode {
 public:
  explicit NullCheckSlowPathX86(uint32_t dex_pc) : dex_pc_(dex_pc) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowNullPointer)));
    // Record the pc of the runtime call for stack-map purposes.
    codegen->RecordPcInfo(dex_pc_);
  }

 private:
  // Dex pc of the instruction being null-checked.
  const uint32_t dex_pc_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathX86);
};
58
// Slow path for the explicit stack overflow check: tears down the frame that
// GenerateFrameEntry just allocated, then jumps (not calls) to the runtime's
// pThrowStackOverflow entry point.
class StackOverflowCheckSlowPathX86 : public SlowPathCode {
 public:
  StackOverflowCheckSlowPathX86() {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    // Pop this method's frame (the entry-pushed registers stay) so the
    // runtime unwinds from the caller's frame.
    __ addl(ESP,
            Immediate(codegen->GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
    __ fs()->jmp(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pThrowStackOverflow)));
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathX86);
};
73
74#undef __
75#define __ reinterpret_cast<X86Assembler*>(GetAssembler())->
76
77inline Condition X86Condition(IfCondition cond) {
78  switch (cond) {
79    case kCondEQ: return kEqual;
80    case kCondNE: return kNotEqual;
81    case kCondLT: return kLess;
82    case kCondLE: return kLessEqual;
83    case kCondGT: return kGreater;
84    case kCondGE: return kGreaterEqual;
85    default:
86      LOG(FATAL) << "Unknown if condition";
87  }
88  return kEqual;
89}
90
// Pretty-prints a core register number for debugging/dump output.
void CodeGeneratorX86::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << X86ManagedRegister::FromCpuRegister(Register(reg));
}

// Pretty-prints an XMM register number for debugging/dump output.
void CodeGeneratorX86::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << X86ManagedRegister::FromXmmRegister(XmmRegister(reg));
}
98
CodeGeneratorX86::CodeGeneratorX86(HGraph* graph)
    : CodeGenerator(graph, kNumberOfRegIds),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this) {}

// Number of bytes used at frame entry to spill registers; currently only the
// fake return-address register (see GenerateFrameEntry).
size_t CodeGeneratorX86::FrameEntrySpillSize() const {
  return kNumberOfPushedRegistersAtEntry * kX86WordSize;
}
108
109static bool* GetBlockedRegisterPairs(bool* blocked_registers) {
110  return blocked_registers + kNumberOfAllocIds;
111}
112
113ManagedRegister CodeGeneratorX86::AllocateFreeRegister(Primitive::Type type,
114                                                       bool* blocked_registers) const {
115  switch (type) {
116    case Primitive::kPrimLong: {
117      bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);
118      size_t reg = AllocateFreeRegisterInternal(blocked_register_pairs, kNumberOfRegisterPairs);
119      X86ManagedRegister pair =
120          X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
121      blocked_registers[pair.AsRegisterPairLow()] = true;
122      blocked_registers[pair.AsRegisterPairHigh()] = true;
123      // Block all other register pairs that share a register with `pair`.
124      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
125        X86ManagedRegister current =
126            X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
127        if (current.AsRegisterPairLow() == pair.AsRegisterPairLow()
128            || current.AsRegisterPairLow() == pair.AsRegisterPairHigh()
129            || current.AsRegisterPairHigh() == pair.AsRegisterPairLow()
130            || current.AsRegisterPairHigh() == pair.AsRegisterPairHigh()) {
131          blocked_register_pairs[i] = true;
132        }
133      }
134      return pair;
135    }
136
137    case Primitive::kPrimByte:
138    case Primitive::kPrimBoolean:
139    case Primitive::kPrimChar:
140    case Primitive::kPrimShort:
141    case Primitive::kPrimInt:
142    case Primitive::kPrimNot: {
143      Register reg = static_cast<Register>(
144          AllocateFreeRegisterInternal(blocked_registers, kNumberOfCpuRegisters));
145      // Block all register pairs that contain `reg`.
146      bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);
147      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
148        X86ManagedRegister current =
149            X86ManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
150        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
151          blocked_register_pairs[i] = true;
152        }
153      }
154      return X86ManagedRegister::FromCpuRegister(reg);
155    }
156
157    case Primitive::kPrimFloat:
158    case Primitive::kPrimDouble:
159      LOG(FATAL) << "Unimplemented register type " << type;
160
161    case Primitive::kPrimVoid:
162      LOG(FATAL) << "Unreachable type " << type;
163  }
164
165  return ManagedRegister::NoRegister();
166}
167
// Marks the registers and register pairs that the allocator must never hand
// out: the stack pointer, callee-saved registers (not yet supported), and any
// pair containing a blocked register.
void CodeGeneratorX86::SetupBlockedRegisters(bool* blocked_registers) const {
  bool* blocked_register_pairs = GetBlockedRegisterPairs(blocked_registers);

  // Don't allocate the dalvik style register pair passing.
  blocked_register_pairs[ECX_EDX] = true;

  // Stack register is always reserved.
  blocked_registers[ESP] = true;

  // TODO: We currently don't use Quick's callee saved registers.
  blocked_registers[EBP] = true;
  blocked_registers[ESI] = true;
  blocked_registers[EDI] = true;
  // Pairs containing EDI must be blocked alongside EDI itself.
  blocked_register_pairs[EAX_EDI] = true;
  blocked_register_pairs[EDX_EDI] = true;
  blocked_register_pairs[ECX_EDI] = true;
  blocked_register_pairs[EBX_EDI] = true;
}
186
size_t CodeGeneratorX86::GetNumberOfRegisters() const {
  return kNumberOfRegIds;
}

// Shorthand for building a Location wrapping a single x86 core register.
static Location X86CpuLocation(Register reg) {
  return Location::RegisterLocation(X86ManagedRegister::FromCpuRegister(reg));
}
194
// The instruction visitor shares the assembler owned by the code generator.
InstructionCodeGeneratorX86::InstructionCodeGeneratorX86(HGraph* graph, CodeGeneratorX86* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
199
// Emits the method prologue: optional stack overflow check, frame allocation,
// and spilling of the current method into its reserved stack slot.
void CodeGeneratorX86::GenerateFrameEntry() {
  // Create a fake register to mimic Quick.
  static const int kFakeReturnRegister = 8;
  core_spill_mask_ |= (1 << kFakeReturnRegister);

  // Leaf methods with small frames can skip the overflow check entirely.
  bool skip_overflow_check = IsLeafMethod() && !IsLargeFrame(GetFrameSize(), InstructionSet::kX86);
  if (!skip_overflow_check && !kExplicitStackOverflowCheck) {
    // Implicit check: read just below the reserved stack region. If the stack
    // is exhausted this faults; RecordPcInfo lets the runtime attribute the
    // fault to this method.
    __ testl(EAX, Address(ESP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kX86))));
    RecordPcInfo(0);
  }

  // The return PC has already been pushed on the stack.
  __ subl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));

  if (!skip_overflow_check && kExplicitStackOverflowCheck) {
    // Explicit check: compare ESP against the thread's stack end (via fs:)
    // and take the slow path when the new frame does not fit.
    SlowPathCode* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathX86();
    AddSlowPath(slow_path);

    __ fs()->cmpl(ESP, Address::Absolute(Thread::StackEndOffset<kX86WordSize>()));
    __ j(kLess, slow_path->GetEntryLabel());
  }

  // Spill the current method (in EAX on entry; LoadCurrentMethod reloads it
  // from this slot) at the bottom of the frame.
  __ movl(Address(ESP, kCurrentMethodStackOffset), EAX);
}
224
// Undoes the frame allocation from GenerateFrameEntry; the subsequent ret
// pops the return PC.
void CodeGeneratorX86::GenerateFrameExit() {
  __ addl(ESP, Immediate(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kX86WordSize));
}

void CodeGeneratorX86::Bind(Label* label) {
  __ Bind(label);
}

// Reloads the current method from the fixed slot it was spilled to in
// GenerateFrameEntry.
void InstructionCodeGeneratorX86::LoadCurrentMethod(Register reg) {
  __ movl(reg, Address(ESP, kCurrentMethodStackOffset));
}
236
237Location CodeGeneratorX86::GetStackLocation(HLoadLocal* load) const {
238  switch (load->GetType()) {
239    case Primitive::kPrimLong:
240      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
241      break;
242
243    case Primitive::kPrimInt:
244    case Primitive::kPrimNot:
245      return Location::StackSlot(GetStackSlot(load->GetLocal()));
246
247    case Primitive::kPrimFloat:
248    case Primitive::kPrimDouble:
249      LOG(FATAL) << "Unimplemented type " << load->GetType();
250
251    case Primitive::kPrimBoolean:
252    case Primitive::kPrimByte:
253    case Primitive::kPrimChar:
254    case Primitive::kPrimShort:
255    case Primitive::kPrimVoid:
256      LOG(FATAL) << "Unexpected type " << load->GetType();
257  }
258
259  LOG(FATAL) << "Unreachable";
260  return Location();
261}
262
// Core registers used to pass arguments when calling into the runtime.
static constexpr Register kRuntimeParameterCoreRegisters[] = { EAX, ECX, EDX };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);

// Calling convention used for calls into runtime (quick) entry points.
class InvokeRuntimeCallingConvention : public CallingConvention<Register> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
276
// Computes the location of the next method argument of the given type,
// following the dex calling convention: registers first, then the stack.
// Longs consume two 32-bit slots and may be split across both.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return X86CpuLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      gp_index_ += 2;  // A long takes two argument slots.
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        // Both halves fit in registers: use a register pair.
        return Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(
            calling_convention.GetRegisterPairAt(index)));
      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
        // Split case: low half in the last register, high half on the stack
        // (see the QuickParameter handling in Move64).
        return Location::QuickParameter(index);
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented parameter type " << type;
      break;

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
317
// Emits a 32-bit move between any combination of register and stack slot.
void CodeGeneratorX86::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
    } else {
      DCHECK(source.IsStackSlot());
      __ movl(destination.AsX86().AsCpuRegister(), Address(ESP, source.GetStackIndex()));
    }
  } else {
    if (source.IsRegister()) {
      __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsCpuRegister());
    } else {
      DCHECK(source.IsStackSlot());
      // Memory-to-memory: x86 has no mem-to-mem mov, so go through push/pop
      // to avoid needing a scratch register.
      __ pushl(Address(ESP, source.GetStackIndex()));
      __ popl(Address(ESP, destination.GetStackIndex()));
    }
  }
}
339
// Emits a 64-bit move between register pairs, stack slots, and "quick
// parameter" locations (a long split between the last argument register and
// the stack — see GetNextLocation).
void CodeGeneratorX86::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ movl(destination.AsX86().AsRegisterPairLow(), source.AsX86().AsRegisterPairLow());
      __ movl(destination.AsX86().AsRegisterPairHigh(), source.AsX86().AsRegisterPairHigh());
    } else if (source.IsQuickParameter()) {
      uint32_t argument_index = source.GetQuickParameterIndex();
      InvokeDexCallingConvention calling_convention;
      // Low half comes from the argument register; the high half lives in the
      // caller's frame, hence the GetFrameSize() adjustment.
      __ movl(destination.AsX86().AsRegisterPairLow(),
              calling_convention.GetRegisterAt(argument_index));
      __ movl(destination.AsX86().AsRegisterPairHigh(), Address(ESP,
          calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movl(destination.AsX86().AsRegisterPairLow(), Address(ESP, source.GetStackIndex()));
      __ movl(destination.AsX86().AsRegisterPairHigh(),
              Address(ESP, source.GetHighStackIndex(kX86WordSize)));
    }
  } else if (destination.IsQuickParameter()) {
    // Setting up an outgoing split-long argument: low half in the register,
    // high half in this frame's outgoing area (no GetFrameSize() adjustment).
    InvokeDexCallingConvention calling_convention;
    uint32_t argument_index = destination.GetQuickParameterIndex();
    if (source.IsRegister()) {
      __ movl(calling_convention.GetRegisterAt(argument_index), source.AsX86().AsRegisterPairLow());
      __ movl(Address(ESP, calling_convention.GetStackOffsetOf(argument_index + 1)),
              source.AsX86().AsRegisterPairHigh());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ movl(calling_convention.GetRegisterAt(argument_index),
              Address(ESP, source.GetStackIndex()));
      // Memory-to-memory move of the high half via push/pop.
      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
      __ popl(Address(ESP, calling_convention.GetStackOffsetOf(argument_index + 1)));
    }
  } else {
    if (source.IsRegister()) {
      __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsRegisterPairLow());
      __ movl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)),
              source.AsX86().AsRegisterPairHigh());
    } else if (source.IsQuickParameter()) {
      InvokeDexCallingConvention calling_convention;
      uint32_t argument_index = source.GetQuickParameterIndex();
      __ movl(Address(ESP, destination.GetStackIndex()),
              calling_convention.GetRegisterAt(argument_index));
      // High half is in the caller's frame; copy it down via push/pop.
      __ pushl(Address(ESP,
          calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize()));
      __ popl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack: move both halves through push/pop.
      __ pushl(Address(ESP, source.GetStackIndex()));
      __ popl(Address(ESP, destination.GetStackIndex()));
      __ pushl(Address(ESP, source.GetHighStackIndex(kX86WordSize)));
      __ popl(Address(ESP, destination.GetHighStackIndex(kX86WordSize)));
    }
  }
}
397
// Materializes `instruction`'s value into `location` for use by `move_for`.
// Constants are emitted inline, locals are copied from their stack slot, and
// any other instruction is copied from its output location.
void CodeGeneratorX86::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  if (instruction->AsIntConstant() != nullptr) {
    Immediate imm(instruction->AsIntConstant()->GetValue());
    if (location.IsRegister()) {
      __ movl(location.AsX86().AsCpuRegister(), imm);
    } else {
      __ movl(Address(ESP, location.GetStackIndex()), imm);
    }
  } else if (instruction->AsLongConstant() != nullptr) {
    // A long constant is emitted as two 32-bit immediates.
    int64_t value = instruction->AsLongConstant()->GetValue();
    if (location.IsRegister()) {
      __ movl(location.AsX86().AsRegisterPairLow(), Immediate(Low32Bits(value)));
      __ movl(location.AsX86().AsRegisterPairHigh(), Immediate(High32Bits(value)));
    } else {
      __ movl(Address(ESP, location.GetStackIndex()), Immediate(Low32Bits(value)));
      __ movl(Address(ESP, location.GetHighStackIndex(kX86WordSize)), Immediate(High32Bits(value)));
    }
  } else if (instruction->AsLoadLocal() != nullptr) {
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        Move32(location, Location::StackSlot(GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      case Primitive::kPrimLong:
        Move64(location, Location::DoubleStackSlot(
            GetStackSlot(instruction->AsLoadLocal()->GetLocal())));
        break;

      default:
        LOG(FATAL) << "Unimplemented local type " << instruction->GetType();
    }
  } else {
    // General case: the producing instruction must immediately precede the
    // consumer (or only temporaries in between), so its output is still live.
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        Move32(location, instruction->GetLocations()->Out());
        break;

      case Primitive::kPrimLong:
        Move64(location, instruction->GetLocations()->Out());
        break;

      default:
        LOG(FATAL) << "Unimplemented type " << instruction->GetType();
    }
  }
}
455
void LocationsBuilderX86::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  if (GetGraph()->GetExitBlock() == successor) {
    // Jumping straight to the exit block: emit the frame teardown here
    // instead of a jump.
    codegen_->GenerateFrameExit();
  } else if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    // Only emit a jump when the successor is not the fall-through block.
    __ jmp(codegen_->GetLabelOf(successor));
  }
}
468
void LocationsBuilderX86::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitExit(HExit* exit) {
  // The exit block should never be reached at runtime; trap in debug builds.
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ int3();
  }
}
479
void LocationsBuilderX86::VisitIf(HIf* if_instr) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(if_instr);
  HInstruction* cond = if_instr->InputAt(0);
  DCHECK(cond->IsCondition());
  HCondition* condition = cond->AsCondition();
  // Only a materialized condition is consumed as a value; otherwise the
  // compare is emitted directly by the codegen's VisitIf.
  if (condition->NeedsMaterialization()) {
    locations->SetInAt(0, Location::Any());
  }
  if_instr->SetLocations(locations);
}
490
491void InstructionCodeGeneratorX86::VisitIf(HIf* if_instr) {
492  HInstruction* cond = if_instr->InputAt(0);
493  DCHECK(cond->IsCondition());
494  HCondition* condition = cond->AsCondition();
495  if (condition->NeedsMaterialization()) {
496    // Materialized condition, compare against 0
497    Location lhs = if_instr->GetLocations()->InAt(0);
498    if (lhs.IsRegister()) {
499      __ cmpl(lhs.AsX86().AsCpuRegister(), Immediate(0));
500    } else {
501      __ cmpl(Address(ESP, lhs.GetStackIndex()), Immediate(0));
502    }
503    __ j(kEqual,  codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
504  } else {
505    Location lhs = condition->GetLocations()->InAt(0);
506    Location rhs = condition->GetLocations()->InAt(1);
507    // LHS is guaranteed to be in a register (see LocationsBuilderX86::VisitCondition).
508    if (rhs.IsRegister()) {
509      __ cmpl(lhs.AsX86().AsCpuRegister(), rhs.AsX86().AsCpuRegister());
510    } else if (rhs.IsConstant()) {
511      HIntConstant* instruction = rhs.GetConstant()->AsIntConstant();
512      Immediate imm(instruction->AsIntConstant()->GetValue());
513      __ cmpl(lhs.AsX86().AsCpuRegister(), imm);
514    } else {
515      __ cmpl(lhs.AsX86().AsCpuRegister(), Address(ESP, rhs.GetStackIndex()));
516    }
517    __ j(X86Condition(condition->GetCondition()),
518         codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
519  }
520  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), if_instr->IfFalseSuccessor())) {
521    __ jmp(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
522  }
523}
524
void LocationsBuilderX86::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitLocal(HLocal* local) {
  // Locals are only declared in the entry block; no code is generated.
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderX86::VisitLoadLocal(HLoadLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
}
540
541void LocationsBuilderX86::VisitStoreLocal(HStoreLocal* store) {
542  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(store);
543  switch (store->InputAt(1)->GetType()) {
544    case Primitive::kPrimBoolean:
545    case Primitive::kPrimByte:
546    case Primitive::kPrimChar:
547    case Primitive::kPrimShort:
548    case Primitive::kPrimInt:
549    case Primitive::kPrimNot:
550      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
551      break;
552
553    case Primitive::kPrimLong:
554      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
555      break;
556
557    default:
558      LOG(FATAL) << "Unimplemented local type " << store->InputAt(1)->GetType();
559  }
560  store->SetLocations(locations);
561}
562
void InstructionCodeGeneratorX86::VisitStoreLocal(HStoreLocal* store) {
  // Nothing to do: the input was constrained to the local's stack slot in
  // LocationsBuilderX86::VisitStoreLocal, so the value is already in place.
}
565
void LocationsBuilderX86::VisitCondition(HCondition* comp) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(comp);
  // The first input must be in a register (cmpl needs a register lhs);
  // the second may be a register, constant, or stack slot.
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::Any());
  // An output is only needed when the condition is materialized as a value.
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister());
  }
  comp->SetLocations(locations);
}
575
void InstructionCodeGeneratorX86::VisitCondition(HCondition* comp) {
  // Only emit code when the condition is used as a value; otherwise the
  // compare is folded into the consuming branch (see VisitIf).
  if (comp->NeedsMaterialization()) {
    LocationSummary* locations = comp->GetLocations();
    if (locations->InAt(1).IsRegister()) {
      __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(),
              locations->InAt(1).AsX86().AsCpuRegister());
    } else if (locations->InAt(1).IsConstant()) {
      HConstant* instruction = locations->InAt(1).GetConstant();
      Immediate imm(instruction->AsIntConstant()->GetValue());
      __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(), imm);
    } else {
      __ cmpl(locations->InAt(0).AsX86().AsCpuRegister(),
              Address(ESP, locations->InAt(1).GetStackIndex()));
    }
    // Materialize the condition as 0/1 in the output register.
    __ setb(X86Condition(comp->GetCondition()), locations->Out().AsX86().AsCpuRegister());
  }
}
593
// All comparison instructions share the generic HCondition handling above;
// each visitor simply delegates to VisitCondition.
void LocationsBuilderX86::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorX86::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
641
void LocationsBuilderX86::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
  constant->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
}

void LocationsBuilderX86::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(constant);
  locations->SetOut(Location::ConstantLocation(constant));
  constant->SetLocations(locations);
}

void InstructionCodeGeneratorX86::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
}
660
void LocationsBuilderX86::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorX86::VisitReturnVoid(HReturnVoid* ret) {
  // Tear down the frame and return to the caller.
  codegen_->GenerateFrameExit();
  __ ret();
}
669
670void LocationsBuilderX86::VisitReturn(HReturn* ret) {
671  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(ret);
672  switch (ret->InputAt(0)->GetType()) {
673    case Primitive::kPrimBoolean:
674    case Primitive::kPrimByte:
675    case Primitive::kPrimChar:
676    case Primitive::kPrimShort:
677    case Primitive::kPrimInt:
678    case Primitive::kPrimNot:
679      locations->SetInAt(0, X86CpuLocation(EAX));
680      break;
681
682    case Primitive::kPrimLong:
683      locations->SetInAt(
684          0, Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(EAX_EDX)));
685      break;
686
687    default:
688      LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
689  }
690  ret->SetLocations(locations);
691}
692
void InstructionCodeGeneratorX86::VisitReturn(HReturn* ret) {
  // Debug-only sanity check: the return value must already be in the ABI
  // return register(s) as required by the location builder.
  if (kIsDebugBuild) {
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86().AsCpuRegister(), EAX);
        break;

      case Primitive::kPrimLong:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsX86().AsRegisterPair(), EAX_EDX);
        break;

      default:
        LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
  __ ret();
}
716
// Assigns argument locations per the dex calling convention, reserves EAX as
// the temp holding the callee method pointer, and pins the output to the ABI
// return register(s).
void LocationsBuilderX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  // A call makes this method a non-leaf (relevant for the overflow check).
  codegen_->MarkNotLeaf();
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(invoke);
  locations->AddTemp(X86CpuLocation(EAX));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  switch (invoke->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
      locations->SetOut(X86CpuLocation(EAX));
      break;

    case Primitive::kPrimLong:
      locations->SetOut(Location::RegisterLocation(X86ManagedRegister::FromRegisterPair(EAX_EDX)));
      break;

    case Primitive::kPrimVoid:
      break;

    case Primitive::kPrimDouble:
    case Primitive::kPrimFloat:
      LOG(FATAL) << "Unimplemented return type " << invoke->GetType();
      break;
  }

  invoke->SetLocations(locations);
}
753
// Emits a static call: resolves the callee through the current method's
// dex-cache resolved-methods array, then calls its quick entry point.
void InstructionCodeGeneratorX86::VisitInvokeStatic(HInvokeStatic* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).AsX86().AsCpuRegister();
  uint32_t heap_reference_size = sizeof(mirror::HeapReference<mirror::Object>);
  // Byte offset of the callee's slot inside the dex-cache methods array.
  size_t index_in_cache = mirror::Array::DataOffset(heap_reference_size).Int32Value() +
      invoke->GetIndexInDexCache() * kX86WordSize;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ movl(temp, Address(temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value()));
  // temp = temp[index_in_cache]
  __ movl(temp, Address(temp, index_in_cache));
  // (temp + offset_of_quick_compiled_code)()
  __ call(Address(temp, mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value()));

  // Record the call site so the runtime can map the return pc to dex code.
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke->GetDexPc());
}
779
780void LocationsBuilderX86::VisitAdd(HAdd* add) {
781  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(add);
782  switch (add->GetResultType()) {
783    case Primitive::kPrimInt:
784    case Primitive::kPrimLong: {
785      locations->SetInAt(0, Location::RequiresRegister());
786      locations->SetInAt(1, Location::Any());
787      locations->SetOut(Location::SameAsFirstInput());
788      break;
789    }
790
791    case Primitive::kPrimBoolean:
792    case Primitive::kPrimByte:
793    case Primitive::kPrimChar:
794    case Primitive::kPrimShort:
795      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
796      break;
797
798    default:
799      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
800  }
801  add->SetLocations(locations);
802}
803
void InstructionCodeGeneratorX86::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  switch (add->GetResultType()) {
    case Primitive::kPrimInt: {
      // Two-address form: the output aliases the first input (enforced by
      // LocationsBuilderX86::VisitAdd).
      DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(),
                locations->Out().AsX86().AsCpuRegister());
      if (locations->InAt(1).IsRegister()) {
        __ addl(locations->InAt(0).AsX86().AsCpuRegister(),
                locations->InAt(1).AsX86().AsCpuRegister());
      } else if (locations->InAt(1).IsConstant()) {
        HConstant* instruction = locations->InAt(1).GetConstant();
        Immediate imm(instruction->AsIntConstant()->GetValue());
        __ addl(locations->InAt(0).AsX86().AsCpuRegister(), imm);
      } else {
        __ addl(locations->InAt(0).AsX86().AsCpuRegister(),
                Address(ESP, locations->InAt(1).GetStackIndex()));
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit add: addl on the low halves, then adcl on the high halves to
      // propagate the carry.
      // NOTE(review): unlike the int case there is no IsConstant branch here —
      // assumes long inputs are never constant locations; verify.
      DCHECK_EQ(locations->InAt(0).AsX86().AsRegisterPair(),
                locations->Out().AsX86().AsRegisterPair());
      if (locations->InAt(1).IsRegister()) {
        __ addl(locations->InAt(0).AsX86().AsRegisterPairLow(),
                locations->InAt(1).AsX86().AsRegisterPairLow());
        __ adcl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
                locations->InAt(1).AsX86().AsRegisterPairHigh());
      } else {
        __ addl(locations->InAt(0).AsX86().AsRegisterPairLow(),
                Address(ESP, locations->InAt(1).GetStackIndex()));
        __ adcl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
                Address(ESP, locations->InAt(1).GetHighStackIndex(kX86WordSize)));
      }
      break;
    }

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unimplemented add type " << add->GetResultType();
  }
}
852
853void LocationsBuilderX86::VisitSub(HSub* sub) {
854  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(sub);
855  switch (sub->GetResultType()) {
856    case Primitive::kPrimInt:
857    case Primitive::kPrimLong: {
858      locations->SetInAt(0, Location::RequiresRegister());
859      locations->SetInAt(1, Location::Any());
860      locations->SetOut(Location::SameAsFirstInput());
861      break;
862    }
863
864    case Primitive::kPrimBoolean:
865    case Primitive::kPrimByte:
866    case Primitive::kPrimChar:
867    case Primitive::kPrimShort:
868      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
869      break;
870
871    default:
872      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
873  }
874  sub->SetLocations(locations);
875}
876
877void InstructionCodeGeneratorX86::VisitSub(HSub* sub) {
878  LocationSummary* locations = sub->GetLocations();
879  switch (sub->GetResultType()) {
880    case Primitive::kPrimInt: {
881      DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(),
882                locations->Out().AsX86().AsCpuRegister());
883      if (locations->InAt(1).IsRegister()) {
884        __ subl(locations->InAt(0).AsX86().AsCpuRegister(),
885                locations->InAt(1).AsX86().AsCpuRegister());
886      } else if (locations->InAt(1).IsConstant()) {
887        HConstant* instruction = locations->InAt(1).GetConstant();
888        Immediate imm(instruction->AsIntConstant()->GetValue());
889        __ subl(locations->InAt(0).AsX86().AsCpuRegister(), imm);
890      } else {
891        __ subl(locations->InAt(0).AsX86().AsCpuRegister(),
892                Address(ESP, locations->InAt(1).GetStackIndex()));
893      }
894      break;
895    }
896
897    case Primitive::kPrimLong: {
898      DCHECK_EQ(locations->InAt(0).AsX86().AsRegisterPair(),
899                locations->Out().AsX86().AsRegisterPair());
900      if (locations->InAt(1).IsRegister()) {
901        __ subl(locations->InAt(0).AsX86().AsRegisterPairLow(),
902                locations->InAt(1).AsX86().AsRegisterPairLow());
903        __ sbbl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
904                locations->InAt(1).AsX86().AsRegisterPairHigh());
905      } else {
906        __ subl(locations->InAt(0).AsX86().AsRegisterPairLow(),
907                Address(ESP, locations->InAt(1).GetStackIndex()));
908        __ sbbl(locations->InAt(0).AsX86().AsRegisterPairHigh(),
909                Address(ESP, locations->InAt(1).GetHighStackIndex(kX86WordSize)));
910      }
911      break;
912    }
913
914    case Primitive::kPrimBoolean:
915    case Primitive::kPrimByte:
916    case Primitive::kPrimChar:
917    case Primitive::kPrimShort:
918      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
919      break;
920
921    default:
922      LOG(FATAL) << "Unimplemented sub type " << sub->GetResultType();
923  }
924}
925
926void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
927  codegen_->MarkNotLeaf();
928  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
929  locations->SetOut(X86CpuLocation(EAX));
930  InvokeRuntimeCallingConvention calling_convention;
931  locations->AddTemp(X86CpuLocation(calling_convention.GetRegisterAt(0)));
932  locations->AddTemp(X86CpuLocation(calling_convention.GetRegisterAt(1)));
933  instruction->SetLocations(locations);
934}
935
void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
  // Calls the pAllocObjectWithAccessCheck entrypoint with the type index in
  // the first argument register and the current method in the second.
  InvokeRuntimeCallingConvention calling_convention;
  LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ movl(calling_convention.GetRegisterAt(0), Immediate(instruction->GetTypeIndex()));

  // Runtime entrypoints are reached through the thread register (fs on x86).
  __ fs()->call(
      Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86WordSize, pAllocObjectWithAccessCheck)));

  // Record the call site so the runtime can map the PC back to this dex pc.
  codegen_->RecordPcInfo(instruction->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}
947
948void LocationsBuilderX86::VisitParameterValue(HParameterValue* instruction) {
949  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
950  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
951  if (location.IsStackSlot()) {
952    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
953  } else if (location.IsDoubleStackSlot()) {
954    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
955  }
956  locations->SetOut(location);
957  instruction->SetLocations(locations);
958}
959
void InstructionCodeGeneratorX86::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do: the parameter is already at its assigned location
  // (see LocationsBuilderX86::VisitParameterValue).
}
962
void LocationsBuilderX86::VisitNot(HNot* instruction) {
  // The negation is computed in place on the first input's register.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::SameAsFirstInput());
  instruction->SetLocations(locations);
}
969
970void InstructionCodeGeneratorX86::VisitNot(HNot* instruction) {
971  LocationSummary* locations = instruction->GetLocations();
972  Location out = locations->Out();
973  DCHECK_EQ(locations->InAt(0).AsX86().AsCpuRegister(), out.AsX86().AsCpuRegister());
974  __ xorl(out.AsX86().AsCpuRegister(), Immediate(1));
975}
976
void LocationsBuilderX86::VisitCompare(HCompare* compare) {
  // First input in a register, second anywhere; the comparison result is
  // materialized into its own register.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(compare);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::Any());
  locations->SetOut(Location::RequiresRegister());
  compare->SetLocations(locations);
}
984
985void InstructionCodeGeneratorX86::VisitCompare(HCompare* compare) {
986  Label greater, done;
987  LocationSummary* locations = compare->GetLocations();
988  switch (compare->InputAt(0)->GetType()) {
989    case Primitive::kPrimLong: {
990      Label less, greater, done;
991      Register output = locations->Out().AsX86().AsCpuRegister();
992      X86ManagedRegister left = locations->InAt(0).AsX86();
993      Location right = locations->InAt(1);
994      if (right.IsRegister()) {
995        __ cmpl(left.AsRegisterPairHigh(), right.AsX86().AsRegisterPairHigh());
996      } else {
997        DCHECK(right.IsDoubleStackSlot());
998        __ cmpl(left.AsRegisterPairHigh(), Address(ESP, right.GetHighStackIndex(kX86WordSize)));
999      }
1000      __ j(kLess, &less);  // Signed compare.
1001      __ j(kGreater, &greater);  // Signed compare.
1002      if (right.IsRegister()) {
1003        __ cmpl(left.AsRegisterPairLow(), right.AsX86().AsRegisterPairLow());
1004      } else {
1005        DCHECK(right.IsDoubleStackSlot());
1006        __ cmpl(left.AsRegisterPairLow(), Address(ESP, right.GetStackIndex()));
1007      }
1008      __ movl(output, Immediate(0));
1009      __ j(kEqual, &done);
1010      __ j(kBelow, &less);  // Unsigned compare.
1011
1012      __ Bind(&greater);
1013      __ movl(output, Immediate(1));
1014      __ jmp(&done);
1015
1016      __ Bind(&less);
1017      __ movl(output, Immediate(-1));
1018
1019      __ Bind(&done);
1020      break;
1021    }
1022    default:
1023      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
1024  }
1025}
1026
void LocationsBuilderX86::VisitPhi(HPhi* instruction) {
  // A phi places no constraints: every input and the output may live
  // anywhere; the register allocator resolves them into moves.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
  instruction->SetLocations(locations);
}
1035
void InstructionCodeGeneratorX86::VisitPhi(HPhi* instruction) {
  // Phis are expected to be resolved before code generation; hitting this
  // is a compiler bug.
  LOG(FATAL) << "Unreachable";
}
1039
1040void LocationsBuilderX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
1041  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
1042  locations->SetInAt(0, Location::RequiresRegister());
1043  Primitive::Type field_type = instruction->InputAt(1)->GetType();
1044  if (field_type == Primitive::kPrimBoolean || field_type == Primitive::kPrimByte) {
1045    // Ensure the value is in a byte register.
1046    locations->SetInAt(1, X86CpuLocation(EAX));
1047  } else {
1048    locations->SetInAt(1, Location::RequiresRegister());
1049  }
1050  // Temporary registers for the write barrier.
1051  if (instruction->InputAt(1)->GetType() == Primitive::kPrimNot) {
1052    locations->AddTemp(Location::RequiresRegister());
1053    // Ensure the card is in a byte register.
1054    locations->AddTemp(X86CpuLocation(ECX));
1055  }
1056  instruction->SetLocations(locations);
1057}
1058
void InstructionCodeGeneratorX86::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  // Stores the value (input 1) into the field at |offset| of the object
  // (input 0) with a width matching the field type. Reference stores also
  // mark the GC card covering the object.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsX86().AsCpuRegister();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
  Primitive::Type field_type = instruction->InputAt(1)->GetType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      // 8-bit store; the builder constrained the value to a byte register.
      ByteRegister value = locations->InAt(1).AsX86().AsByteRegister();
      __ movb(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      // 16-bit store.
      Register value = locations->InAt(1).AsX86().AsCpuRegister();
      __ movw(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimInt: {
      // 32-bit store.
      Register value = locations->InAt(1).AsX86().AsCpuRegister();
      __ movl(Address(obj, offset), value);
      break;
    }

    case Primitive::kPrimNot: {
      Register value = locations->InAt(1).AsX86().AsCpuRegister();
      __ movl(Address(obj, offset), value);
      // Write barrier: skipped entirely for null stores.
      Label is_null;
      Register temp = locations->GetTemp(0).AsX86().AsCpuRegister();
      Register card = locations->GetTemp(1).AsX86().AsCpuRegister();
      __ testl(value, value);
      __ j(kEqual, &is_null);
      // card = this thread's card-table pointer (read via fs).
      __ fs()->movl(card, Address::Absolute(Thread::CardTableOffset<kX86WordSize>().Int32Value()));
      // temp = obj >> kCardShift — the card index for the object's address.
      __ movl(temp, obj);
      __ shrl(temp, Immediate(gc::accounting::CardTable::kCardShift));
      // Store the low byte of |card| at card_table + card_index.
      __ movb(Address(temp, card, TIMES_1, 0),  locations->GetTemp(1).AsX86().AsByteRegister());
      __ Bind(&is_null);
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit store performed as two 32-bit stores (low word first).
      X86ManagedRegister value = locations->InAt(1).AsX86();
      __ movl(Address(obj, offset), value.AsRegisterPairLow());
      __ movl(Address(obj, kX86WordSize + offset), value.AsRegisterPairHigh());
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << field_type;
      // LOG(FATAL) aborts, so the missing break cannot fall through.

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
  }
}
1117
void LocationsBuilderX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Object in a register (input 0); the loaded field value gets its own
  // register (pair, for 64-bit types).
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
  instruction->SetLocations(locations);
}
1124
void InstructionCodeGeneratorX86::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  // Loads the field at |offset| of the object (input 0) into the output,
  // widening sub-word values according to the field type.
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsX86().AsCpuRegister();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      // 8-bit zero-extending load.
      Register out = locations->Out().AsX86().AsCpuRegister();
      __ movzxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimByte: {
      // 8-bit sign-extending load.
      Register out = locations->Out().AsX86().AsCpuRegister();
      __ movsxb(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimShort: {
      // 16-bit sign-extending load.
      Register out = locations->Out().AsX86().AsCpuRegister();
      __ movsxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimChar: {
      // 16-bit zero-extending load.
      Register out = locations->Out().AsX86().AsCpuRegister();
      __ movzxw(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // Plain 32-bit load (also used for references).
      Register out = locations->Out().AsX86().AsCpuRegister();
      __ movl(out, Address(obj, offset));
      break;
    }

    case Primitive::kPrimLong: {
      // TODO: support volatile.
      // 64-bit load performed as two 32-bit loads (low word first).
      X86ManagedRegister out = locations->Out().AsX86();
      __ movl(out.AsRegisterPairLow(), Address(obj, offset));
      __ movl(out.AsRegisterPairHigh(), Address(obj, kX86WordSize + offset));
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
      // LOG(FATAL) aborts, so the missing break cannot fall through.

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
  }
}
1178
void LocationsBuilderX86::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
  // The object may be in a register or a stack slot; both can be compared
  // against zero directly.
  locations->SetInAt(0, Location::Any());
  // TODO: Have a normalization phase that makes this instruction never used.
  locations->SetOut(Location::SameAsFirstInput());
  instruction->SetLocations(locations);
}
1186
1187void InstructionCodeGeneratorX86::VisitNullCheck(HNullCheck* instruction) {
1188  SlowPathCode* slow_path =
1189      new (GetGraph()->GetArena()) NullCheckSlowPathX86(instruction->GetDexPc());
1190  codegen_->AddSlowPath(slow_path);
1191
1192  LocationSummary* locations = instruction->GetLocations();
1193  Location obj = locations->InAt(0);
1194  DCHECK(obj.Equals(locations->Out()));
1195
1196  if (obj.IsRegister()) {
1197    __ cmpl(obj.AsX86().AsCpuRegister(), Immediate(0));
1198  } else {
1199    DCHECK(locations->InAt(0).IsStackSlot());
1200    __ cmpl(Address(ESP, obj.GetStackIndex()), Immediate(0));
1201  }
1202  __ j(kEqual, slow_path->GetEntryLabel());
1203}
1204
void LocationsBuilderX86::VisitTemporary(HTemporary* temp) {
  // Temporaries get no LocationSummary; they are handled by the code
  // generator directly (see the instruction visitor below).
  temp->SetLocations(nullptr);
}
1208
void InstructionCodeGeneratorX86::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
}
1212
void LocationsBuilderX86::VisitParallelMove(HParallelMove* instruction) {
  // Parallel moves carry their operands' locations already; the builder
  // should never see one.
  LOG(FATAL) << "Unreachable";
}
1216
void InstructionCodeGeneratorX86::VisitParallelMove(HParallelMove* instruction) {
  // Delegate to the move resolver, which orders and emits the moves.
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
1220
X86Assembler* ParallelMoveResolverX86::GetAssembler() const {
  // Parallel moves are emitted with the owning code generator's assembler.
  return codegen_->GetAssembler();
}
1224
1225void ParallelMoveResolverX86::MoveMemoryToMemory(int dst, int src) {
1226  ScratchRegisterScope ensure_scratch(
1227      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());
1228  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
1229  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, src + stack_offset));
1230  __ movl(Address(ESP, dst + stack_offset), static_cast<Register>(ensure_scratch.GetRegister()));
1231}
1232
1233void ParallelMoveResolverX86::EmitMove(size_t index) {
1234  MoveOperands* move = moves_.Get(index);
1235  Location source = move->GetSource();
1236  Location destination = move->GetDestination();
1237
1238  if (source.IsRegister()) {
1239    if (destination.IsRegister()) {
1240      __ movl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
1241    } else {
1242      DCHECK(destination.IsStackSlot());
1243      __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsCpuRegister());
1244    }
1245  } else if (source.IsStackSlot()) {
1246    if (destination.IsRegister()) {
1247      __ movl(destination.AsX86().AsCpuRegister(), Address(ESP, source.GetStackIndex()));
1248    } else {
1249      DCHECK(destination.IsStackSlot());
1250      MoveMemoryToMemory(destination.GetStackIndex(),
1251                         source.GetStackIndex());
1252    }
1253  } else if (source.IsConstant()) {
1254    HIntConstant* instruction = source.GetConstant()->AsIntConstant();
1255    Immediate imm(instruction->AsIntConstant()->GetValue());
1256    if (destination.IsRegister()) {
1257      __ movl(destination.AsX86().AsCpuRegister(), imm);
1258    } else {
1259      __ movl(Address(ESP, destination.GetStackIndex()), imm);
1260    }
1261  } else {
1262    LOG(FATAL) << "Unimplemented";
1263  }
1264}
1265
1266void ParallelMoveResolverX86::Exchange(Register reg, int mem) {
1267  Register suggested_scratch = reg == EAX ? EBX : EAX;
1268  ScratchRegisterScope ensure_scratch(
1269      this, reg, suggested_scratch, codegen_->GetNumberOfCoreRegisters());
1270
1271  int stack_offset = ensure_scratch.IsSpilled() ? kX86WordSize : 0;
1272  __ movl(static_cast<Register>(ensure_scratch.GetRegister()), Address(ESP, mem + stack_offset));
1273  __ movl(Address(ESP, mem + stack_offset), reg);
1274  __ movl(reg, static_cast<Register>(ensure_scratch.GetRegister()));
1275}
1276
void ParallelMoveResolverX86::Exchange(int mem1, int mem2) {
  // Swaps two 32-bit stack slots using two distinct scratch core registers.
  ScratchRegisterScope ensure_scratch1(
      this, kNoRegister, EAX, codegen_->GetNumberOfCoreRegisters());

  // Make sure the second suggested scratch differs from the first one picked.
  Register suggested_scratch = ensure_scratch1.GetRegister() == EAX ? EBX : EAX;
  ScratchRegisterScope ensure_scratch2(
      this, ensure_scratch1.GetRegister(), suggested_scratch, codegen_->GetNumberOfCoreRegisters());

  // Each spilled scratch register pushes one word, shifting ESP-relative
  // offsets; account for both possible spills.
  int stack_offset = ensure_scratch1.IsSpilled() ? kX86WordSize : 0;
  stack_offset += ensure_scratch2.IsSpilled() ? kX86WordSize : 0;
  __ movl(static_cast<Register>(ensure_scratch1.GetRegister()), Address(ESP, mem1 + stack_offset));
  __ movl(static_cast<Register>(ensure_scratch2.GetRegister()), Address(ESP, mem2 + stack_offset));
  __ movl(Address(ESP, mem2 + stack_offset), static_cast<Register>(ensure_scratch1.GetRegister()));
  __ movl(Address(ESP, mem1 + stack_offset), static_cast<Register>(ensure_scratch2.GetRegister()));
}
1292
1293void ParallelMoveResolverX86::EmitSwap(size_t index) {
1294  MoveOperands* move = moves_.Get(index);
1295  Location source = move->GetSource();
1296  Location destination = move->GetDestination();
1297
1298  if (source.IsRegister() && destination.IsRegister()) {
1299    __ xchgl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());
1300  } else if (source.IsRegister() && destination.IsStackSlot()) {
1301    Exchange(source.AsX86().AsCpuRegister(), destination.GetStackIndex());
1302  } else if (source.IsStackSlot() && destination.IsRegister()) {
1303    Exchange(destination.AsX86().AsCpuRegister(), source.GetStackIndex());
1304  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
1305    Exchange(destination.GetStackIndex(), source.GetStackIndex());
1306  } else {
1307    LOG(FATAL) << "Unimplemented";
1308  }
1309}
1310
void ParallelMoveResolverX86::SpillScratch(int reg) {
  // Preserve the scratch register's current value on the stack.
  __ pushl(static_cast<Register>(reg));
}
1314
void ParallelMoveResolverX86::RestoreScratch(int reg) {
  // Restore the value saved by SpillScratch.
  __ popl(static_cast<Register>(reg));
}
1318
1319}  // namespace x86
1320}  // namespace art
1321