// code_generator_arm.cc revision 7c4954d429626a6ceafbf05be41bf5f840894e44
1/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "code_generator_arm.h"
18
19#include "entrypoints/quick/quick_entrypoints.h"
20#include "gc/accounting/card_table.h"
21#include "mirror/array-inl.h"
22#include "mirror/art_method.h"
23#include "mirror/class.h"
24#include "thread.h"
25#include "utils/assembler.h"
26#include "utils/arm/assembler_arm.h"
27#include "utils/arm/managed_register_arm.h"
28#include "utils/stack_checks.h"
29
30namespace art {
31
32namespace arm {
33
34static DRegister FromLowSToD(SRegister reg) {
35  DCHECK_EQ(reg % 2, 0);
36  return static_cast<DRegister>(reg / 2);
37}
38
// Use the implicit (guard-page probe) stack overflow check by default; see
// GenerateFrameEntry for both variants.
static constexpr bool kExplicitStackOverflowCheck = false;

// Registers pushed in GenerateFrameEntry; keep in sync with PushList there.
static constexpr int kNumberOfPushedRegistersAtEntry = 1 + 2;  // LR, R6, R7
// The current ArtMethod is stored at SP + 0 on frame entry.
static constexpr int kCurrentMethodStackOffset = 0;

// Registers used to pass arguments to runtime entry points.
static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
// Runtime calls take no arguments in FPU registers.
static constexpr SRegister kRuntimeParameterFpuRegisters[] = { };
static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;
49
// Calling convention used when calling into the runtime (quick entry points):
// core arguments in R0-R2, no FPU arguments.
class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};
61
62#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
63
// Base class for ARM slow paths: owns the entry label (bound at the start of
// the out-of-line code) and the exit label (branch target to resume fast path).
class SlowPathCodeARM : public SlowPathCode {
 public:
  SlowPathCodeARM() : entry_label_(), exit_label_() {}

  Label* GetEntryLabel() { return &entry_label_; }
  Label* GetExitLabel() { return &exit_label_; }

 private:
  Label entry_label_;
  Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM);
};
77
// Slow path that calls the pThrowNullPointer runtime entry point. It does not
// return, so no exit branch is emitted.
class NullCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    // Load the entry point address from the thread register and call it.
    int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowNullPointer).Int32Value();
    __ LoadFromOffset(kLoadWord, LR, TR, offset);
    __ blx(LR);
    // Record the dex pc so the runtime can map this throw back to dex code.
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
};
94
// Slow path for the explicit stack overflow check: tail-calls the
// pThrowStackOverflow entry point by loading its address straight into PC.
class StackOverflowCheckSlowPathARM : public SlowPathCodeARM {
 public:
  StackOverflowCheckSlowPathARM() {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    // Loading into PC transfers control directly; LR still holds the caller's
    // return address for the runtime to report.
    __ LoadFromOffset(kLoadWord, PC, TR,
        QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowStackOverflow).Int32Value());
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM);
};
108
// Slow path that calls pTestSuspend. Live registers are saved/restored around
// the call since the runtime may suspend the thread (e.g. for GC).
class SuspendCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pTestSuspend).Int32Value();
    __ LoadFromOffset(kLoadWord, LR, TR, offset);
    __ blx(LR);
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    // Either fall back to the instruction that requested the check, or jump
    // straight to the known successor block.
    if (successor_ == nullptr) {
      __ b(GetReturnLabel());
    } else {
      __ b(arm_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
};
145
// Slow path that moves the failing index and the array length into the
// runtime calling convention registers and calls pThrowArrayBounds. It does
// not return.
class BoundsCheckSlowPathARM : public SlowPathCodeARM {
 public:
  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // Marshal arguments: index in the first runtime register, length in the
    // second (R0 and R1 per kRuntimeParameterCoreRegisters).
    InvokeRuntimeCallingConvention calling_convention;
    arm_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), index_location_);
    arm_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(1)), length_location_);
    int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowArrayBounds).Int32Value();
    __ LoadFromOffset(kLoadWord, LR, TR, offset);
    __ blx(LR);
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
};
174
175#undef __
176#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->
177
// Maps an HIR comparison condition to the ARM condition code that is true
// exactly when the HIR condition holds.
inline Condition ARMCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return EQ;
    case kCondNE: return NE;
    case kCondLT: return LT;
    case kCondLE: return LE;
    case kCondGT: return GT;
    case kCondGE: return GE;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return EQ;        // Unreachable.
}
191
// Maps an HIR comparison condition to the ARM condition code of its logical
// negation, used e.g. for the "else" arm of an IT block.
inline Condition ARMOppositeCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return NE;
    case kCondNE: return EQ;
    case kCondLT: return GE;
    case kCondLE: return GT;
    case kCondGT: return LE;
    case kCondGE: return LT;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return EQ;        // Unreachable.
}
205
206void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
207  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
208}
209
210void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
211  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
212}
213
// Spills core register `reg_id` to the stack slot at `stack_index` and
// returns the number of bytes consumed (one ARM word).
size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
218
// Reloads core register `reg_id` from the stack slot at `stack_index` and
// returns the number of bytes consumed (one ARM word).
size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}
223
// Builds the ARM code generator for `graph`, wiring the locations builder,
// instruction visitor and parallel-move resolver back to this codegen.
CodeGeneratorARM::CodeGeneratorARM(HGraph* graph)
    : CodeGenerator(graph, kNumberOfCoreRegisters, kNumberOfSRegisters, kNumberOfRegisterPairs),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(true) {}
231
232size_t CodeGeneratorARM::FrameEntrySpillSize() const {
233  return kNumberOfPushedRegistersAtEntry * kArmWordSize;
234}
235
// Picks a free register (or register pair) suitable for `type` and marks it
// as blocked. Pair and single-register bookkeeping is kept consistent: taking
// a pair blocks both cores, and taking a core blocks every pair containing it.
Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimLong: {
      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
      ArmManagedRegister pair =
          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);

      // Block both halves so they cannot also be handed out individually.
      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
      UpdateBlockedPairRegisters();
      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
      // Block all register pairs that contain `reg`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        ArmManagedRegister current =
            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs_[i] = true;
        }
      }
      return Location::RegisterLocation(reg);
    }

    case Primitive::kPrimFloat: {
      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
      return Location::FpuRegisterLocation(reg);
    }

    case Primitive::kPrimDouble: {
      // A double occupies two consecutive S registers (an aliased D register).
      int reg = FindTwoFreeConsecutiveEntries(blocked_fpu_registers_, kNumberOfSRegisters);
      return Location::FpuRegisterPairLocation(reg, reg + 1);
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return Location();
}
285
286void CodeGeneratorARM::SetupBlockedRegisters() const {
287  // Don't allocate the dalvik style register pair passing.
288  blocked_register_pairs_[R1_R2] = true;
289
290  // Stack register, LR and PC are always reserved.
291  blocked_core_registers_[SP] = true;
292  blocked_core_registers_[LR] = true;
293  blocked_core_registers_[PC] = true;
294
295  // Reserve R4 for suspend check.
296  blocked_core_registers_[R4] = true;
297
298  // Reserve thread register.
299  blocked_core_registers_[TR] = true;
300
301  // Reserve temp register.
302  blocked_core_registers_[IP] = true;
303
304  // TODO: We currently don't use Quick's callee saved registers.
305  // We always save and restore R6 and R7 to make sure we can use three
306  // register pairs for long operations.
307  blocked_core_registers_[R5] = true;
308  blocked_core_registers_[R8] = true;
309  blocked_core_registers_[R10] = true;
310  blocked_core_registers_[R11] = true;
311
312  blocked_fpu_registers_[S16] = true;
313  blocked_fpu_registers_[S17] = true;
314  blocked_fpu_registers_[S18] = true;
315  blocked_fpu_registers_[S19] = true;
316  blocked_fpu_registers_[S20] = true;
317  blocked_fpu_registers_[S21] = true;
318  blocked_fpu_registers_[S22] = true;
319  blocked_fpu_registers_[S23] = true;
320
321  UpdateBlockedPairRegisters();
322}
323
324void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
325  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
326    ArmManagedRegister current =
327        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
328    if (blocked_core_registers_[current.AsRegisterPairLow()]
329        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
330      blocked_register_pairs_[i] = true;
331    }
332  }
333}
334
// Instruction visitor that emits ARM code; borrows the codegen's assembler.
InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
      : HGraphVisitor(graph),
        assembler_(codegen->GetAssembler()),
        codegen_(codegen) {}
339
// Emits the method prologue: stack overflow check (unless provably
// unnecessary), callee-save pushes, frame extension, and storing the current
// method (in R0 on entry) at SP + 0.
void CodeGeneratorARM::GenerateFrameEntry() {
  // Leaf methods with small frames cannot overflow past the reserved gap.
  bool skip_overflow_check = IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  if (!skip_overflow_check) {
    if (kExplicitStackOverflowCheck) {
      // Explicit check: compare SP against the thread's stack end and branch
      // to the throwing slow path when below it.
      SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM();
      AddSlowPath(slow_path);

      __ LoadFromOffset(kLoadWord, IP, TR, Thread::StackEndOffset<kArmWordSize>().Int32Value());
      __ cmp(SP, ShifterOperand(IP));
      __ b(slow_path->GetEntryLabel(), CC);
    } else {
      // Implicit check: probe below SP; a fault in the guard region is
      // turned into a StackOverflowError by the runtime, hence the pc info.
      __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
      __ LoadFromOffset(kLoadWord, IP, IP, 0);
      RecordPcInfo(nullptr, 0);
    }
  }

  // Must match kNumberOfPushedRegistersAtEntry and GenerateFrameExit.
  core_spill_mask_ |= (1 << LR | 1 << R6 | 1 << R7);
  __ PushList(1 << LR | 1 << R6 | 1 << R7);

  // The return PC has already been pushed on the stack.
  __ AddConstant(SP, -(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize));
  __ StoreToOffset(kStoreWord, R0, SP, 0);
}
364
// Emits the method epilogue: shrink the frame, then pop R6, R7 and pop the
// saved LR directly into PC to return.
void CodeGeneratorARM::GenerateFrameExit() {
  __ AddConstant(SP, GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize);
  __ PopList(1 << PC | 1 << R6 | 1 << R7);
}
369
// Binds the basic block's label at the current assembler position.
void CodeGeneratorARM::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}
373
374Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
375  switch (load->GetType()) {
376    case Primitive::kPrimLong:
377    case Primitive::kPrimDouble:
378      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
379      break;
380
381    case Primitive::kPrimInt:
382    case Primitive::kPrimNot:
383    case Primitive::kPrimFloat:
384      return Location::StackSlot(GetStackSlot(load->GetLocal()));
385
386    case Primitive::kPrimBoolean:
387    case Primitive::kPrimByte:
388    case Primitive::kPrimChar:
389    case Primitive::kPrimShort:
390    case Primitive::kPrimVoid:
391      LOG(FATAL) << "Unexpected type " << load->GetType();
392  }
393
394  LOG(FATAL) << "Unreachable";
395  return Location();
396}
397
// Assigns the location of the next method argument of the given type under
// the dex calling convention: core args in registers then stack, longs in
// register pairs (possibly split register/stack as a "quick parameter"),
// floats/doubles in S-register(s) with backfilling of S registers skipped by
// double alignment.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      uint32_t stack_index = stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      uint32_t stack_index = stack_index_;
      gp_index_ += 2;
      stack_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        // Both halves fit in registers.
        ArmManagedRegister pair = ArmManagedRegister::FromRegisterPair(
            calling_convention.GetRegisterPairAt(index));
        return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
        // Split: low half in the last register, high half on the stack.
        return Location::QuickParameter(stack_index);
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t stack_index = stack_index_++;
      // On an even float index there is nothing left to backfill: jump past
      // any S registers already consumed by doubles.
      if (float_index_ % 2 == 0) {
        float_index_ = std::max(double_index_, float_index_);
      }
      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimDouble: {
      // Doubles need an even-aligned S-register pair; the skipped odd S
      // register (if any) stays available for a later float (backfilling).
      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
      uint32_t stack_index = stack_index_;
      stack_index_ += 2;
      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
        uint32_t index = double_index_;
        double_index_ += 2;
        return Location::FpuRegisterPairLocation(
          calling_convention.GetFpuRegisterAt(index),
          calling_convention.GetFpuRegisterAt(index + 1));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}
464
// Returns where a value of the given type is returned by a dex method:
// R0 (or R0/R1 for longs) for core types, S0 (or S0/S1 for doubles) for
// floating point, and no location for void.
Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      return Location::RegisterLocation(R0);
    }

    case Primitive::kPrimFloat: {
      return Location::FpuRegisterLocation(S0);
    }

    case Primitive::kPrimLong: {
      return Location::RegisterPairLocation(R0, R1);
    }

    case Primitive::kPrimDouble: {
      return Location::FpuRegisterPairLocation(S0, S1);
    }

    case Primitive::kPrimVoid:
      return Location();
  }
  UNREACHABLE();
  return Location();
}
494
// Moves a 32-bit value between any combination of core register, S register
// and stack slot. Uses IP as scratch for stack-to-stack moves. No-op when
// source and destination are the same location.
void CodeGeneratorARM::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(destination.As<Register>(), source.As<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovrs(destination.As<Register>(), source.As<SRegister>());
    } else {
      __ LoadFromOffset(kLoadWord, destination.As<Register>(), SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ vmovsr(destination.As<SRegister>(), source.As<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovs(destination.As<SRegister>(), source.As<SRegister>());
    } else {
      __ LoadSFromOffset(destination.As<SRegister>(), SP, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot());
    if (source.IsRegister()) {
      __ StoreToOffset(kStoreWord, source.As<Register>(), SP, destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      __ StoreSToOffset(source.As<SRegister>(), SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot());
      // Stack-to-stack: bounce through the scratch register.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}
528
// Moves a 64-bit value between register pairs, FPU register pairs, double
// stack slots and "quick parameter" locations (low half in a register, high
// half in the caller's out area). Uses IP/R0 as scratch where needed.
// NOTE(review): the R1/R2 pair is special-cased to two single-word accesses —
// presumably because ldrd/strd need an even-numbered first register; confirm.
void CodeGeneratorARM::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      __ Mov(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
      __ Mov(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else if (source.IsQuickParameter()) {
      // Low word lives in an argument register; high word in the caller's
      // frame, hence the + GetFrameSize() adjustment.
      uint32_t argument_index = source.GetQuickParameterIndex();
      InvokeDexCallingConvention calling_convention;
      __ Mov(destination.AsRegisterPairLow<Register>(),
             calling_convention.GetRegisterAt(argument_index));
      __ LoadFromOffset(kLoadWord, destination.AsRegisterPairHigh<Register>(),
             SP, calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      if (destination.AsRegisterPairLow<Register>() == R1) {
        DCHECK_EQ(destination.AsRegisterPairHigh<Register>(), R2);
        __ LoadFromOffset(kLoadWord, R1, SP, source.GetStackIndex());
        __ LoadFromOffset(kLoadWord, R2, SP, source.GetHighStackIndex(kArmWordSize));
      } else {
        __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
                          SP, source.GetStackIndex());
      }
    }
  } else if (destination.IsFpuRegisterPair()) {
    if (source.IsDoubleStackSlot()) {
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    } else {
      UNIMPLEMENTED(FATAL);
    }
  } else if (destination.IsQuickParameter()) {
    InvokeDexCallingConvention calling_convention;
    uint32_t argument_index = destination.GetQuickParameterIndex();
    if (source.IsRegisterPair()) {
      __ Mov(calling_convention.GetRegisterAt(argument_index),
             source.AsRegisterPairLow<Register>());
      __ StoreToOffset(kStoreWord, source.AsRegisterPairHigh<Register>(),
             SP, calling_convention.GetStackOffsetOf(argument_index + 1));
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Low word to the argument register, high word via R0 as scratch.
      __ LoadFromOffset(
          kLoadWord, calling_convention.GetRegisterAt(argument_index), SP, source.GetStackIndex());
      __ LoadFromOffset(kLoadWord, R0, SP, source.GetHighStackIndex(kArmWordSize));
      __ StoreToOffset(kStoreWord, R0, SP, calling_convention.GetStackOffsetOf(argument_index + 1));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegisterPair()) {
      if (source.AsRegisterPairLow<Register>() == R1) {
        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
      } else {
        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
                         SP, destination.GetStackIndex());
      }
    } else if (source.IsQuickParameter()) {
      InvokeDexCallingConvention calling_convention;
      uint32_t argument_index = source.GetQuickParameterIndex();
      __ StoreToOffset(kStoreWord, calling_convention.GetRegisterAt(argument_index),
             SP, destination.GetStackIndex());
      __ LoadFromOffset(kLoadWord, R0,
             SP, calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize());
      __ StoreToOffset(kStoreWord, R0, SP, destination.GetHighStackIndex(kArmWordSize));
    } else if (source.IsFpuRegisterPair()) {
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack: two word copies through the scratch register.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetHighStackIndex(kArmWordSize));
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
    }
  }
}
614
// Materializes `instruction`'s value into `location` on behalf of `move_for`:
// constants are loaded immediately, local loads are copied from their stack
// slot, and anything else must already have its output computed (it is the
// instruction immediately preceding `move_for`).
void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  // Nothing to do if the value is already where it is wanted.
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  if (instruction->IsIntConstant()) {
    int32_t value = instruction->AsIntConstant()->GetValue();
    if (location.IsRegister()) {
      __ LoadImmediate(location.As<Register>(), value);
    } else {
      DCHECK(location.IsStackSlot());
      __ LoadImmediate(IP, value);
      __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
    }
  } else if (instruction->IsLongConstant()) {
    int64_t value = instruction->AsLongConstant()->GetValue();
    if (location.IsRegisterPair()) {
      __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
      __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
    } else {
      DCHECK(location.IsDoubleStackSlot());
      __ LoadImmediate(IP, Low32Bits(value));
      __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
      __ LoadImmediate(IP, High32Bits(value));
      __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
    }
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(stack_slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(stack_slot));
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  } else {
    // General case: the value was just computed; move it from its output
    // location. (The DCHECK implies `locations` is non-null here.)
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimNot:
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        Move32(location, locations->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, locations->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}
686
// A goto needs no operands, hence no location summary.
void LocationsBuilderARM::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}
690
// Emits the branch for a goto, folding in any pending suspend check (loop
// back edges and entry-block suspend checks) and eliding the jump when the
// successor is the fall-through block.
void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
    // The suspend check handles the branch to the loop header itself.
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ b(codegen_->GetLabelOf(successor));
  }
}
712
// The exit block needs no operands, hence no location summary.
void LocationsBuilderARM::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
716
// The exit block is never reached at runtime; in debug builds emit a
// breakpoint to catch any control flow that falls into it.
void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ bkpt(0);
  }
}
723
724void LocationsBuilderARM::VisitIf(HIf* if_instr) {
725  LocationSummary* locations =
726      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
727  HInstruction* cond = if_instr->InputAt(0);
728  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
729    locations->SetInAt(0, Location::RequiresRegister());
730  }
731}
732
// Emits the conditional branch for an if. Three cases: a constant condition
// (branch or fall through statically), a materialized condition (compare its
// register value against 0), or a non-materialized HCondition (emit the
// compare inline and branch on its condition code). Jumps to fall-through
// successors are elided.
void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  if (cond->IsIntConstant()) {
    // Constant condition, statically compared against 1.
    int32_t cond_value = cond->AsIntConstant()->GetValue();
    if (cond_value == 1) {
      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                     if_instr->IfTrueSuccessor())) {
        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
      }
      return;
    } else {
      DCHECK_EQ(cond_value, 0);
    }
  } else {
    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
      // Condition has been materialized, compare the output to 0
      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
      __ cmp(if_instr->GetLocations()->InAt(0).As<Register>(),
             ShifterOperand(0));
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
    } else {
      // Condition has not been materialized, use its inputs as the
      // comparison and its condition as the branch condition.
      LocationSummary* locations = cond->GetLocations();
      if (locations->InAt(1).IsRegister()) {
        __ cmp(locations->InAt(0).As<Register>(),
               ShifterOperand(locations->InAt(1).As<Register>()));
      } else {
        DCHECK(locations->InAt(1).IsConstant());
        int32_t value =
            locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
        ShifterOperand operand;
        if (ShifterOperand::CanHoldArm(value, &operand)) {
          __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value));
        } else {
          // Constant not encodable as an immediate operand: go through IP.
          Register temp = IP;
          __ LoadImmediate(temp, value);
          __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp));
        }
      }
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
           ARMCondition(cond->AsCondition()->GetCondition()));
    }
  }
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                 if_instr->IfFalseSuccessor())) {
    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}
783
784
// A condition compares a register against a register or an encodable
// constant; it only produces an output register when materialized.
void LocationsBuilderARM::VisitCondition(HCondition* comp) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}
794
// Materializes a condition into 0/1: compares the inputs, then uses an
// IT-then-else block to conditionally move 1 (condition true) or 0
// (opposite condition) into the output register.
void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
  // Non-materialized conditions are consumed directly by the branch in
  // VisitIf; nothing to emit here.
  if (!comp->NeedsMaterialization()) return;

  LocationSummary* locations = comp->GetLocations();
  if (locations->InAt(1).IsRegister()) {
    __ cmp(locations->InAt(0).As<Register>(),
           ShifterOperand(locations->InAt(1).As<Register>()));
  } else {
    DCHECK(locations->InAt(1).IsConstant());
    int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
    ShifterOperand operand;
    if (ShifterOperand::CanHoldArm(value, &operand)) {
      __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value));
    } else {
      // Constant not encodable as an immediate operand: go through IP.
      Register temp = IP;
      __ LoadImmediate(temp, value);
      __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp));
    }
  }
  __ it(ARMCondition(comp->GetCondition()), kItElse);
  __ mov(locations->Out().As<Register>(), ShifterOperand(1),
         ARMCondition(comp->GetCondition()));
  __ mov(locations->Out().As<Register>(), ShifterOperand(0),
         ARMOppositeCondition(comp->GetCondition()));
}
820
// The visitors below all delegate to the shared HCondition handling above;
// the specific comparison kind is recovered via HCondition::GetCondition().
void LocationsBuilderARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}
868
void LocationsBuilderARM::VisitLocal(HLocal* local) {
  // Locals carry no locations of their own; loads/stores pin them to stack
  // slots (see VisitStoreLocal below).
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
  // Locals are only declared in the entry block; nothing to emit.
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
}
884
885void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
886  LocationSummary* locations =
887      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
888  switch (store->InputAt(1)->GetType()) {
889    case Primitive::kPrimBoolean:
890    case Primitive::kPrimByte:
891    case Primitive::kPrimChar:
892    case Primitive::kPrimShort:
893    case Primitive::kPrimInt:
894    case Primitive::kPrimNot:
895    case Primitive::kPrimFloat:
896      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
897      break;
898
899    case Primitive::kPrimLong:
900    case Primitive::kPrimDouble:
901      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
902      break;
903
904    default:
905      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
906  }
907}
908
void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
  // Nothing to do: the input was pinned to the local's stack slot, so the
  // store is realized as a move by the register allocator.
}
911
// Constants are not materialized eagerly: each one is given a
// ConstantLocation, and users either encode the value as an immediate or
// load it at the use site.
void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
}

void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
}

void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site.
}

void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site.
}
951
void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  // Pin the returned value to the ABI return location so no extra move is
  // needed before the frame exit.
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}

void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
  // The value is already in the return location; just tear down the frame.
  codegen_->GenerateFrameExit();
}
969
void LocationsBuilderARM::VisitInvokeStatic(HInvokeStatic* invoke) {
  HandleInvoke(invoke);
}

// Loads the ArtMethod* of the method being compiled into `reg`. It is kept
// at the bottom of the frame (kCurrentMethodStackOffset == 0).
void InstructionCodeGeneratorARM::LoadCurrentMethod(Register reg) {
  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
}
977
978void InstructionCodeGeneratorARM::VisitInvokeStatic(HInvokeStatic* invoke) {
979  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
980  uint32_t heap_reference_size = sizeof(mirror::HeapReference<mirror::Object>);
981  size_t index_in_cache = mirror::Array::DataOffset(heap_reference_size).Int32Value() +
982      invoke->GetIndexInDexCache() * kArmWordSize;
983
984  // TODO: Implement all kinds of calls:
985  // 1) boot -> boot
986  // 2) app -> boot
987  // 3) app -> app
988  //
989  // Currently we implement the app -> app logic, which looks up in the resolve cache.
990
991  // temp = method;
992  LoadCurrentMethod(temp);
993  // temp = temp->dex_cache_resolved_methods_;
994  __ LoadFromOffset(kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
995  // temp = temp[index_in_cache]
996  __ LoadFromOffset(kLoadWord, temp, temp, index_in_cache);
997  // LR = temp[offset_of_quick_compiled_code]
998  __ LoadFromOffset(kLoadWord, LR, temp,
999                     mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value());
1000  // LR()
1001  __ blx(LR);
1002
1003  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
1004  DCHECK(!codegen_->IsLeafMethod());
1005}
1006
void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  // Same register setup as static invokes: arguments follow the managed
  // calling convention (see HandleInvoke).
  HandleInvoke(invoke);
}
1010
1011void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
1012  LocationSummary* locations =
1013      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
1014  locations->AddTemp(Location::RegisterLocation(R0));
1015
1016  InvokeDexCallingConventionVisitor calling_convention_visitor;
1017  for (size_t i = 0; i < invoke->InputCount(); i++) {
1018    HInstruction* input = invoke->InputAt(i);
1019    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
1020  }
1021
1022  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
1023}
1024
1025
// Emits a virtual call: load the receiver's class, fetch the target method
// from the class' embedded vtable, and branch to its quick entry point.
void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
  // Byte offset of the target method within the embedded vtable.
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.As<Register>(), class_offset);
  }
  // temp = temp->GetMethodAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
1050
void LocationsBuilderARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      // For longs the output must not alias the input: the code generator
      // writes the low half of the result before reading the input's high
      // half (see InstructionCodeGeneratorARM::VisitNeg).
      bool output_overlaps = (neg->GetResultType() == Primitive::kPrimLong);
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), output_overlaps);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Not yet implemented neg type " << neg->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1072
// Emits integer negation: RSB for 32-bit values, an RSBS/SBC/SUB sequence
// for 64-bit values (Thumb-2 has no RSC instruction).
void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      // out = 0 - in.
      __ rsb(out.As<Register>(), in.As<Register>(), ShifterOperand(0));
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegisterPair());
      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
      __ rsbs(out.AsRegisterPairLow<Register>(),
              in.AsRegisterPairLow<Register>(),
              ShifterOperand(0));
      // We cannot emit an RSC (Reverse Subtract with Carry)
      // instruction here, as it does not exist in the Thumb-2
      // instruction set.  We use the following approach
      // using SBC and SUB instead.
      //
      // out.hi = -C
      __ sbc(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(out.AsRegisterPairHigh<Register>()));
      // out.hi = out.hi - in.hi
      __ sub(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Not yet implemented neg type " << neg->GetResultType();
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}
1113
1114void LocationsBuilderARM::VisitAdd(HAdd* add) {
1115  LocationSummary* locations =
1116      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
1117  switch (add->GetResultType()) {
1118    case Primitive::kPrimInt:
1119    case Primitive::kPrimLong: {
1120      bool output_overlaps = (add->GetResultType() == Primitive::kPrimLong);
1121      locations->SetInAt(0, Location::RequiresRegister());
1122      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
1123      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1124      break;
1125    }
1126
1127    case Primitive::kPrimFloat:
1128    case Primitive::kPrimDouble: {
1129      locations->SetInAt(0, Location::RequiresFpuRegister());
1130      locations->SetInAt(1, Location::RequiresFpuRegister());
1131      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1132      break;
1133    }
1134
1135    default:
1136      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
1137  }
1138}
1139
void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
  LocationSummary* locations = add->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
      if (second.IsRegister()) {
        __ add(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>()));
      } else {
        // Constant operand: AddConstant picks the best encoding.
        __ AddConstant(out.As<Register>(),
                       first.As<Register>(),
                       second.GetConstant()->AsIntConstant()->GetValue());
      }
      break;

    case Primitive::kPrimLong:
      // 64-bit add: low words with ADDS (sets carry), high words with ADC.
      __ adds(out.AsRegisterPairLow<Register>(),
              first.AsRegisterPairLow<Register>(),
              ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ adc(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
      __ vadds(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
      break;

    case Primitive::kPrimDouble:
      // Double values live in S-register pairs; operate on the overlapping
      // D register of each pair.
      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}
1179
1180void LocationsBuilderARM::VisitSub(HSub* sub) {
1181  LocationSummary* locations =
1182      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
1183  switch (sub->GetResultType()) {
1184    case Primitive::kPrimInt:
1185    case Primitive::kPrimLong: {
1186      bool output_overlaps = (sub->GetResultType() == Primitive::kPrimLong);
1187      locations->SetInAt(0, Location::RequiresRegister());
1188      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
1189      locations->SetOut(Location::RequiresRegister(), output_overlaps);
1190      break;
1191    }
1192    case Primitive::kPrimFloat:
1193    case Primitive::kPrimDouble: {
1194      locations->SetInAt(0, Location::RequiresFpuRegister());
1195      locations->SetInAt(1, Location::RequiresFpuRegister());
1196      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1197      break;
1198    }
1199    default:
1200      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
1201  }
1202}
1203
void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        __ sub(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>()));
      } else {
        // Subtracting a constant is emitted as adding its negation.
        // NOTE(review): the negation is UB in C++ when the constant is
        // INT32_MIN — confirm such constants cannot reach this point.
        __ AddConstant(out.As<Register>(),
                       first.As<Register>(),
                       -second.GetConstant()->AsIntConstant()->GetValue());
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit sub: low words with SUBS (sets borrow), high words with SBC.
      __ subs(out.AsRegisterPairLow<Register>(),
              first.AsRegisterPairLow<Register>(),
              ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ sbc(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vsubs(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      // Operate on the D register overlapping each operand's S-register pair.
      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }


    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}
1248
1249void LocationsBuilderARM::VisitMul(HMul* mul) {
1250  LocationSummary* locations =
1251      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
1252  switch (mul->GetResultType()) {
1253    case Primitive::kPrimInt:
1254    case Primitive::kPrimLong:  {
1255      locations->SetInAt(0, Location::RequiresRegister());
1256      locations->SetInAt(1, Location::RequiresRegister());
1257      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1258      break;
1259    }
1260
1261    case Primitive::kPrimFloat:
1262    case Primitive::kPrimDouble: {
1263      locations->SetInAt(0, Location::RequiresFpuRegister());
1264      locations->SetInAt(1, Location::RequiresFpuRegister());
1265      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1266      break;
1267    }
1268
1269    default:
1270      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
1271  }
1272}
1273
// Emits multiplication: MUL for 32-bit values, a MUL/MLA/UMULL sequence for
// the 64-bit schoolbook decomposition, and VMULS/VMULD for floats/doubles.
void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      __ mul(out.As<Register>(), first.As<Register>(), second.As<Register>());
      break;
    }
    case Primitive::kPrimLong: {
      Register out_hi = out.AsRegisterPairHigh<Register>();
      Register out_lo = out.AsRegisterPairLow<Register>();
      Register in1_hi = first.AsRegisterPairHigh<Register>();
      Register in1_lo = first.AsRegisterPairLow<Register>();
      Register in2_hi = second.AsRegisterPairHigh<Register>();
      Register in2_lo = second.AsRegisterPairLow<Register>();

      // Extra checks to protect caused by the existence of R1_R2.
      // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
      DCHECK_NE(out_hi, in1_lo);
      DCHECK_NE(out_hi, in2_lo);

      // input: in1 - 64 bits, in2 - 64 bits
      // output: out
      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: out.lo = (in1.lo * in2.lo)[31:0]

      // IP <- in1.lo * in2.hi
      __ mul(IP, in1_lo, in2_hi);
      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ mla(out_hi, in1_hi, in2_lo, IP);
      // out.lo <- (in1.lo * in2.lo)[31:0];
      __ umull(out_lo, IP, in1_lo, in2_lo);
      // out.hi <- in2.hi * in1.lo +  in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ add(out_hi, out_hi, ShifterOperand(IP));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vmuls(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      // Operate on the D register overlapping each operand's S-register pair.
      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
1331
1332void LocationsBuilderARM::VisitDiv(HDiv* div) {
1333  LocationSummary* locations =
1334      new (GetGraph()->GetArena()) LocationSummary(div, LocationSummary::kNoCall);
1335  switch (div->GetResultType()) {
1336    case Primitive::kPrimInt:
1337    case Primitive::kPrimLong: {
1338      LOG(FATAL) << "Not implemented div type" << div->GetResultType();
1339      break;
1340    }
1341    case Primitive::kPrimFloat:
1342    case Primitive::kPrimDouble: {
1343      locations->SetInAt(0, Location::RequiresFpuRegister());
1344      locations->SetInAt(1, Location::RequiresFpuRegister());
1345      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
1346      break;
1347    }
1348
1349    default:
1350      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1351  }
1352}
1353
1354void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
1355  LocationSummary* locations = div->GetLocations();
1356  Location out = locations->Out();
1357  Location first = locations->InAt(0);
1358  Location second = locations->InAt(1);
1359
1360  switch (div->GetResultType()) {
1361    case Primitive::kPrimInt:
1362    case Primitive::kPrimLong: {
1363      LOG(FATAL) << "Not implemented div type" << div->GetResultType();
1364      break;
1365    }
1366
1367    case Primitive::kPrimFloat: {
1368      __ vdivs(out.As<SRegister>(), first.As<SRegister>(), second.As<SRegister>());
1369      break;
1370    }
1371
1372    case Primitive::kPrimDouble: {
1373      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
1374               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
1375               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
1376      break;
1377    }
1378
1379    default:
1380      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
1381  }
1382}
1383
1384void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
1385  LocationSummary* locations =
1386      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1387  InvokeRuntimeCallingConvention calling_convention;
1388  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1389  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1390  locations->SetOut(Location::RegisterLocation(R0));
1391}
1392
// Allocates a new object by calling the pAllocObjectWithAccessCheck runtime
// entry point with (type_idx, referring method).
void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());

  // Load the entry point from the current Thread (TR) and call it.
  int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocObjectWithAccessCheck).Int32Value();
  __ LoadFromOffset(kLoadWord, LR, TR, offset);
  __ blx(LR);

  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}
1405
1406void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
1407  LocationSummary* locations =
1408      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
1409  InvokeRuntimeCallingConvention calling_convention;
1410  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1411  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1412  locations->SetOut(Location::RegisterLocation(R0));
1413  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1414}
1415
// Allocates a new array via the pAllocArrayWithAccessCheck runtime entry
// point; the length argument was pinned to the third argument register by
// the locations builder.
void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());

  // Load the entry point from the current Thread (TR) and call it.
  int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocArrayWithAccessCheck).Int32Value();
  __ LoadFromOffset(kLoadWord, LR, TR, offset);
  __ blx(LR);

  codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}
1428
1429void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
1430  LocationSummary* locations =
1431      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1432  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
1433  if (location.IsStackSlot()) {
1434    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1435  } else if (location.IsDoubleStackSlot()) {
1436    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
1437  }
1438  locations->SetOut(location);
1439}
1440
void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location
  // (set up by LocationsBuilderARM::VisitParameterValue).
}
1444
void LocationsBuilderARM::VisitNot(HNot* not_) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
  // Boolean and bitwise not both operate register-to-register.
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}
1451
void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
  LocationSummary* locations = not_->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (not_->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
      // Boolean not is emitted as x ^ 1 (assumes the input is a canonical
      // 0/1 value — only the low bit is flipped).
      __ eor(out.As<Register>(), in.As<Register>(), ShifterOperand(1));
      break;

    case Primitive::kPrimInt:
      // Bitwise not via MVN (move-not).
      __ mvn(out.As<Register>(), ShifterOperand(in.As<Register>()));
      break;

    case Primitive::kPrimLong:
      // 64-bit not: MVN each half independently.
      __ mvn(out.AsRegisterPairLow<Register>(),
             ShifterOperand(in.AsRegisterPairLow<Register>()));
      __ mvn(out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    default:
      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
  }
}
1476
1477void LocationsBuilderARM::VisitCompare(HCompare* compare) {
1478  LocationSummary* locations =
1479      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
1480  locations->SetInAt(0, Location::RequiresRegister());
1481  locations->SetInAt(1, Location::RequiresRegister());
1482  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1483}
1484
1485void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
1486  Label greater, done;
1487  LocationSummary* locations = compare->GetLocations();
1488  switch (compare->InputAt(0)->GetType()) {
1489    case Primitive::kPrimLong: {
1490      Register output = locations->Out().As<Register>();
1491      Location left = locations->InAt(0);
1492      Location right = locations->InAt(1);
1493      Label less, greater, done;
1494      __ cmp(left.AsRegisterPairHigh<Register>(),
1495             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
1496      __ b(&less, LT);
1497      __ b(&greater, GT);
1498      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect
1499      // the status flags.
1500      __ LoadImmediate(output, 0);
1501      __ cmp(left.AsRegisterPairLow<Register>(),
1502             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
1503      __ b(&done, EQ);
1504      __ b(&less, CC);
1505
1506      __ Bind(&greater);
1507      __ LoadImmediate(output, 1);
1508      __ b(&done);
1509
1510      __ Bind(&less);
1511      __ LoadImmediate(output, -1);
1512
1513      __ Bind(&done);
1514      break;
1515    }
1516    default:
1517      LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType();
1518  }
1519}
1520
1521void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
1522  LocationSummary* locations =
1523      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1524  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
1525    locations->SetInAt(i, Location::Any());
1526  }
1527  locations->SetOut(Location::Any());
1528}
1529
void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
  // Phis generate no code of their own; reaching here is a compiler bug.
  LOG(FATAL) << "Unreachable";
}
1533
void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  bool is_object_type = instruction->GetFieldType() == Primitive::kPrimNot;
  // Input 0 is the object holding the field, input 1 the value to store.
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers for the write barrier.
  if (is_object_type) {
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  }
}
1546
// Stores a value into an instance field, selecting the store width from the
// field type and emitting the GC write barrier for reference stores.
void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
  Primitive::Type field_type = instruction->GetFieldType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      Register value = locations->InAt(1).As<Register>();
      __ StoreToOffset(kStoreByte, value, obj, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      Register value = locations->InAt(1).As<Register>();
      __ StoreToOffset(kStoreHalfword, value, obj, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register value = locations->InAt(1).As<Register>();
      __ StoreToOffset(kStoreWord, value, obj, offset);
      if (field_type == Primitive::kPrimNot) {
        // Reference store: update the card table so the GC sees the write.
        Register temp = locations->GetTemp(0).As<Register>();
        Register card = locations->GetTemp(1).As<Register>();
        codegen_->MarkGCCard(temp, card, obj, value);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // A word-pair store writes both halves of the long.
      Location value = locations->InAt(1);
      __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << field_type;
      UNREACHABLE();
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }
}
1595
1596void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
1597  LocationSummary* locations =
1598      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1599  locations->SetInAt(0, Location::RequiresRegister());
1600  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1601}
1602
// Loads an instance field, selecting the load width and sign/zero extension
// from the field's type.
void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      // Booleans are zero-extended bytes.
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      break;
    }

    case Primitive::kPrimByte: {
      // Bytes are sign-extended.
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      break;
    }

    case Primitive::kPrimShort: {
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      break;
    }

    case Primitive::kPrimChar: {
      // Chars are unsigned 16-bit values, hence the zero-extending load.
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register out = locations->Out().As<Register>();
      __ LoadFromOffset(kLoadWord, out, obj, offset);
      break;
    }

    case Primitive::kPrimLong: {
      // TODO: support volatile.
      // A word-pair load fills both halves of the output pair.
      Location out = locations->Out();
      __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
      UNREACHABLE();
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
1656
1657void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
1658  LocationSummary* locations =
1659      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1660  locations->SetInAt(0, Location::RequiresRegister());
1661  if (instruction->HasUses()) {
1662    locations->SetOut(Location::SameAsFirstInput());
1663  }
1664}
1665
1666void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
1667  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
1668  codegen_->AddSlowPath(slow_path);
1669
1670  LocationSummary* locations = instruction->GetLocations();
1671  Location obj = locations->InAt(0);
1672
1673  if (obj.IsRegister()) {
1674    __ cmp(obj.As<Register>(), ShifterOperand(0));
1675    __ b(slow_path->GetEntryLabel(), EQ);
1676  } else {
1677    DCHECK(obj.IsConstant()) << obj;
1678    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
1679    __ b(slow_path->GetEntryLabel());
1680  }
1681}
1682
1683void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
1684  LocationSummary* locations =
1685      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1686  locations->SetInAt(0, Location::RequiresRegister());
1687  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1688  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1689}
1690
// Emits the load for an array element read.  A constant index is folded
// into the addressing offset (scaled by the element size); a register
// index is first combined with the array base into IP, scaled with LSL.
// TIMES_x is the log2 element scale, used both as shift amount and in the
// constant-offset computation.
void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    // Boolean: zero-extending byte load.
    case Primitive::kPrimBoolean: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>()));
        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
      }
      break;
    }

    // Byte: sign-extending byte load.
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>()));
        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
      }
      break;
    }

    // Short: sign-extending halfword load, index scaled by 2.
    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
      }
      break;
    }

    // Char: zero-extending halfword load, index scaled by 2.
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
      }
      break;
    }

    // Int and references share the 32-bit word load; the DCHECK pins the
    // assumption that heap references are 32 bits wide.
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4));
        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit load into a register pair; only the low register is passed —
      // presumably the assembler derives the high half (TODO confirm).
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8));
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
      UNREACHABLE();
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}
1786
1787void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
1788  Primitive::Type value_type = instruction->GetComponentType();
1789  bool is_object = value_type == Primitive::kPrimNot;
1790  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
1791      instruction, is_object ? LocationSummary::kCall : LocationSummary::kNoCall);
1792  if (is_object) {
1793    InvokeRuntimeCallingConvention calling_convention;
1794    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
1795    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
1796    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
1797  } else {
1798    locations->SetInAt(0, Location::RequiresRegister());
1799    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
1800    locations->SetInAt(2, Location::RequiresRegister());
1801  }
1802}
1803
1804void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
1805  LocationSummary* locations = instruction->GetLocations();
1806  Register obj = locations->InAt(0).As<Register>();
1807  Location index = locations->InAt(1);
1808  Primitive::Type value_type = instruction->GetComponentType();
1809
1810  switch (value_type) {
1811    case Primitive::kPrimBoolean:
1812    case Primitive::kPrimByte: {
1813      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
1814      Register value = locations->InAt(2).As<Register>();
1815      if (index.IsConstant()) {
1816        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
1817        __ StoreToOffset(kStoreByte, value, obj, offset);
1818      } else {
1819        __ add(IP, obj, ShifterOperand(index.As<Register>()));
1820        __ StoreToOffset(kStoreByte, value, IP, data_offset);
1821      }
1822      break;
1823    }
1824
1825    case Primitive::kPrimShort:
1826    case Primitive::kPrimChar: {
1827      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
1828      Register value = locations->InAt(2).As<Register>();
1829      if (index.IsConstant()) {
1830        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
1831        __ StoreToOffset(kStoreHalfword, value, obj, offset);
1832      } else {
1833        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
1834        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
1835      }
1836      break;
1837    }
1838
1839    case Primitive::kPrimInt: {
1840      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
1841      Register value = locations->InAt(2).As<Register>();
1842      if (index.IsConstant()) {
1843        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
1844        __ StoreToOffset(kStoreWord, value, obj, offset);
1845      } else {
1846        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4));
1847        __ StoreToOffset(kStoreWord, value, IP, data_offset);
1848      }
1849      break;
1850    }
1851
1852    case Primitive::kPrimNot: {
1853      int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAputObject).Int32Value();
1854      __ LoadFromOffset(kLoadWord, LR, TR, offset);
1855      __ blx(LR);
1856      codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
1857      DCHECK(!codegen_->IsLeafMethod());
1858      break;
1859    }
1860
1861    case Primitive::kPrimLong: {
1862      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
1863      Location value = locations->InAt(2);
1864      if (index.IsConstant()) {
1865        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
1866        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
1867      } else {
1868        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8));
1869        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
1870      }
1871      break;
1872    }
1873
1874    case Primitive::kPrimFloat:
1875    case Primitive::kPrimDouble:
1876      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
1877      UNREACHABLE();
1878    case Primitive::kPrimVoid:
1879      LOG(FATAL) << "Unreachable type " << instruction->GetType();
1880      UNREACHABLE();
1881  }
1882}
1883
1884void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
1885  LocationSummary* locations =
1886      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1887  locations->SetInAt(0, Location::RequiresRegister());
1888  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
1889}
1890
1891void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
1892  LocationSummary* locations = instruction->GetLocations();
1893  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
1894  Register obj = locations->InAt(0).As<Register>();
1895  Register out = locations->Out().As<Register>();
1896  __ LoadFromOffset(kLoadWord, out, obj, offset);
1897}
1898
1899void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
1900  LocationSummary* locations =
1901      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
1902  locations->SetInAt(0, Location::RequiresRegister());
1903  locations->SetInAt(1, Location::RequiresRegister());
1904  if (instruction->HasUses()) {
1905    locations->SetOut(Location::SameAsFirstInput());
1906  }
1907}
1908
1909void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
1910  LocationSummary* locations = instruction->GetLocations();
1911  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
1912      instruction, locations->InAt(0), locations->InAt(1));
1913  codegen_->AddSlowPath(slow_path);
1914
1915  Register index = locations->InAt(0).As<Register>();
1916  Register length = locations->InAt(1).As<Register>();
1917
1918  __ cmp(index, ShifterOperand(length));
1919  __ b(slow_path->GetEntryLabel(), CS);
1920}
1921
// GC write barrier: marks the card covering |object| after a reference was
// stored into it.  Skipped entirely when the stored |value| is null.
// |temp| and |card| are caller-provided scratch registers.
void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
  Label is_null;
  __ CompareAndBranchIfZero(value, &is_null);
  // card <- base address of the card table, read off the current thread (TR).
  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
  // temp <- card index of the stored-to object.
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  // Store the low byte of the table base as the card value — presumably the
  // base is aligned so its LSB equals the dirty-card marker, saving a
  // separate constant load (TODO confirm against CardTable).
  __ strb(card, Address(card, temp));
  __ Bind(&is_null);
}
1930
// Temporaries carry no LocationSummary of their own; the code generator
// manages their storage directly.
void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}
1934
void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
  // Nothing to emit: a temporary only reserves storage, which is driven by
  // the code generator rather than by this visitor.
}
1938
// Parallel moves are synthesized after location building has run, so this
// visitor should never see one.
void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
  LOG(FATAL) << "Unreachable";
}
1942
void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
  // Delegate to the parallel-move resolver, which sequences the individual
  // moves and swaps (see EmitMove/EmitSwap in this file).
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}
1946
void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  // A suspend check has no inputs or outputs; it only needs a summary
  // marking it as calling on the slow path.
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}
1950
1951void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
1952  HBasicBlock* block = instruction->GetBlock();
1953  if (block->GetLoopInformation() != nullptr) {
1954    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
1955    // The back edge will generate the suspend check.
1956    return;
1957  }
1958  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
1959    // The goto will generate the suspend check.
1960    return;
1961  }
1962  GenerateSuspendCheck(instruction, nullptr);
1963}
1964
// Emits the actual suspend-check sequence: decrement the countdown in R4
// (maintained elsewhere by this code generator — TODO(review): confirm R4's
// role) and divert to the slow path when it reaches zero.  With a
// |successor| the fast path branches straight to that block; without one,
// execution falls through at the slow path's return label.
void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathARM* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  // subs sets the flags; EQ below means the counter just hit zero.
  __ subs(R4, R4, ShifterOperand(1));
  if (successor == nullptr) {
    __ b(slow_path->GetEntryLabel(), EQ);
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ b(codegen_->GetLabelOf(successor), NE);
    __ b(slow_path->GetEntryLabel());
  }
}
1980
// The move resolver emits through the code generator's assembler.
ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
  return codegen_->GetAssembler();
}
1984
// Emits one pending move of the parallel-move graph.  Only 32-bit core
// register / stack slot / int-constant combinations are handled here; IP
// serves as the scratch register for memory-to-memory transfers.
void ParallelMoveResolverARM::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ Mov(destination.As<Register>(), source.As<Register>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreToOffset(kStoreWord, source.As<Register>(),
                       SP, destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ LoadFromOffset(kLoadWord, destination.As<Register>(),
                        SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsStackSlot());
      // Memory-to-memory: stage the word through IP.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  } else {
    DCHECK(source.IsConstant());
    DCHECK(source.GetConstant()->IsIntConstant());
    int32_t value = source.GetConstant()->AsIntConstant()->GetValue();
    if (destination.IsRegister()) {
      __ LoadImmediate(destination.As<Register>(), value);
    } else {
      DCHECK(destination.IsStackSlot());
      // Materialize the constant in IP, then spill it to the slot.
      __ LoadImmediate(IP, value);
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}
2020
// Swaps the contents of core register |reg| with the stack slot at
// SP + |mem|, using IP as the temporary.
void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
  __ Mov(IP, reg);
  __ LoadFromOffset(kLoadWord, reg, SP, mem);
  __ StoreToOffset(kStoreWord, IP, SP, mem);
}
2026
// Swaps two stack slots.  Two temporaries are needed: IP plus a core
// register supplied by the scratch-register scope, which may spill one
// with a push — shifting every SP-relative offset by a word.
void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
  // Account for the pushed scratch register, if any, in the slot offsets.
  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
                    SP, mem1 + stack_offset);
  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
                   SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
}
2037
2038void ParallelMoveResolverARM::EmitSwap(size_t index) {
2039  MoveOperands* move = moves_.Get(index);
2040  Location source = move->GetSource();
2041  Location destination = move->GetDestination();
2042
2043  if (source.IsRegister() && destination.IsRegister()) {
2044    DCHECK_NE(source.As<Register>(), IP);
2045    DCHECK_NE(destination.As<Register>(), IP);
2046    __ Mov(IP, source.As<Register>());
2047    __ Mov(source.As<Register>(), destination.As<Register>());
2048    __ Mov(destination.As<Register>(), IP);
2049  } else if (source.IsRegister() && destination.IsStackSlot()) {
2050    Exchange(source.As<Register>(), destination.GetStackIndex());
2051  } else if (source.IsStackSlot() && destination.IsRegister()) {
2052    Exchange(destination.As<Register>(), source.GetStackIndex());
2053  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
2054    Exchange(source.GetStackIndex(), destination.GetStackIndex());
2055  } else {
2056    LOG(FATAL) << "Unimplemented";
2057  }
2058}
2059
// Saves a scratch register on the stack so the resolver may clobber it.
void ParallelMoveResolverARM::SpillScratch(int reg) {
  __ Push(static_cast<Register>(reg));
}
2063
// Restores a scratch register previously saved by SpillScratch.
void ParallelMoveResolverARM::RestoreScratch(int reg) {
  __ Pop(static_cast<Register>(reg));
}
2067
2068}  // namespace arm
2069}  // namespace art
2070