// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/base/bits.h"
#include "src/compiler/instruction-selector-impl.h"
#include "src/compiler/node-matchers.h"

namespace v8 {
namespace internal {
namespace compiler {
12
// Adds Arm-specific methods for generating InstructionOperands.
class ArmOperandGenerator FINAL : public OperandGenerator {
 public:
  explicit ArmOperandGenerator(InstructionSelector* selector)
      : OperandGenerator(selector) {}

  // Uses |node| as an immediate operand if its constant value fits the
  // immediate field of |opcode|, otherwise falls back to a register operand.
  InstructionOperand* UseOperand(Node* node, InstructionCode opcode) {
    if (CanBeImmediate(node, opcode)) {
      return UseImmediate(node);
    }
    return UseRegister(node);
  }

  // Returns true if |node| is a 32-bit constant whose value can be encoded
  // directly in the immediate field of |opcode|.
  bool CanBeImmediate(Node* node, InstructionCode opcode) {
    Int32Matcher m(node);
    if (!m.HasValue()) return false;
    int32_t value = m.Value();
    switch (ArchOpcodeField::decode(opcode)) {
      // These instructions have a bitwise-complement twin (AND<->BIC,
      // MOV<->MVN), so an immediate that fits after negating all bits is
      // equally usable.
      case kArmAnd:
      case kArmMov:
      case kArmMvn:
      case kArmBic:
        return ImmediateFitsAddrMode1Instruction(value) ||
               ImmediateFitsAddrMode1Instruction(~value);

      // These instructions have an arithmetic twin (ADD<->SUB, CMP<->CMN),
      // so the arithmetic negation of the immediate is equally usable.
      case kArmAdd:
      case kArmSub:
      case kArmCmp:
      case kArmCmn:
        return ImmediateFitsAddrMode1Instruction(value) ||
               ImmediateFitsAddrMode1Instruction(-value);

      // No complementary form: the immediate must fit as-is.
      case kArmTst:
      case kArmTeq:
      case kArmOrr:
      case kArmEor:
      case kArmRsb:
        return ImmediateFitsAddrMode1Instruction(value);

      // VFP load/store: 8-bit offset scaled by 4, i.e. a multiple of 4 in
      // [-1020, 1020].
      case kArmVldr32:
      case kArmVstr32:
      case kArmVldr64:
      case kArmVstr64:
        return value >= -1020 && value <= 1020 && (value % 4) == 0;

      // Word/byte load/store: 12-bit unscaled offset.
      case kArmLdrb:
      case kArmLdrsb:
      case kArmStrb:
      case kArmLdr:
      case kArmStr:
      case kArmStoreWriteBarrier:
        return value >= -4095 && value <= 4095;

      // Halfword (and signed byte/halfword) load/store: 8-bit offset.
      case kArmLdrh:
      case kArmLdrsh:
      case kArmStrh:
        return value >= -255 && value <= 255;

      // Opcodes with no immediate operand form.
      case kArchCallCodeObject:
      case kArchCallJSFunction:
      case kArchJmp:
      case kArchNop:
      case kArchRet:
      case kArchTruncateDoubleToI:
      case kArmMul:
      case kArmMla:
      case kArmMls:
      case kArmSdiv:
      case kArmUdiv:
      case kArmBfc:
      case kArmUbfx:
      case kArmVcmpF64:
      case kArmVaddF64:
      case kArmVsubF64:
      case kArmVmulF64:
      case kArmVmlaF64:
      case kArmVmlsF64:
      case kArmVdivF64:
      case kArmVmodF64:
      case kArmVnegF64:
      case kArmVsqrtF64:
      case kArmVcvtF64S32:
      case kArmVcvtF64U32:
      case kArmVcvtS32F64:
      case kArmVcvtU32F64:
      case kArmPush:
        return false;
    }
    // No default case above so the compiler warns about unhandled opcodes.
    UNREACHABLE();
    return false;
  }

 private:
  // Thin wrapper around the assembler's addressing-mode-1 immediate check
  // (8-bit value rotated right by an even amount).
  bool ImmediateFitsAddrMode1Instruction(int32_t imm) const {
    return Assembler::ImmediateFitsAddrMode1Instruction(imm);
  }
};
110
111
// Emits a three-register (result, lhs, rhs) double-precision instruction.
static void VisitRRRFloat64(InstructionSelector* selector, ArchOpcode opcode,
                            Node* node) {
  ArmOperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node),
                 g.UseRegister(node->InputAt(0)),
                 g.UseRegister(node->InputAt(1)));
}
119
120
// Tries to match |node| as a rotate-right that can be folded into the
// flexible second operand of a data-processing instruction.  On success the
// ROR addressing mode is merged into *opcode_return and the value/shift
// operands are filled in.
static bool TryMatchROR(InstructionSelector* selector,
                        InstructionCode* opcode_return, Node* node,
                        InstructionOperand** value_return,
                        InstructionOperand** shift_return) {
  ArmOperandGenerator g(selector);
  if (node->opcode() != IrOpcode::kWord32Ror) return false;
  Int32BinopMatcher m(node);
  *value_return = g.UseRegister(m.left().node());
  // ROR by immediate encodes rotations of 1-31; anything else (including a
  // non-constant rotation) uses the register-specified form.
  if (m.right().IsInRange(1, 31)) {
    *opcode_return |= AddressingModeField::encode(kMode_Operand2_R_ROR_I);
    *shift_return = g.UseImmediate(m.right().node());
  } else {
    *opcode_return |= AddressingModeField::encode(kMode_Operand2_R_ROR_R);
    *shift_return = g.UseRegister(m.right().node());
  }
  return true;
}
138
139
// Tries to match |node| as an arithmetic shift right usable as the flexible
// second operand.  See TryMatchROR for the contract.
static inline bool TryMatchASR(InstructionSelector* selector,
                               InstructionCode* opcode_return, Node* node,
                               InstructionOperand** value_return,
                               InstructionOperand** shift_return) {
  ArmOperandGenerator g(selector);
  if (node->opcode() != IrOpcode::kWord32Sar) return false;
  Int32BinopMatcher m(node);
  *value_return = g.UseRegister(m.left().node());
  // ASR by immediate encodes shifts of 1-32; otherwise use the register form.
  if (m.right().IsInRange(1, 32)) {
    *opcode_return |= AddressingModeField::encode(kMode_Operand2_R_ASR_I);
    *shift_return = g.UseImmediate(m.right().node());
  } else {
    *opcode_return |= AddressingModeField::encode(kMode_Operand2_R_ASR_R);
    *shift_return = g.UseRegister(m.right().node());
  }
  return true;
}
157
158
// Tries to match |node| as a logical shift left usable as the flexible
// second operand.  See TryMatchROR for the contract.
static inline bool TryMatchLSL(InstructionSelector* selector,
                               InstructionCode* opcode_return, Node* node,
                               InstructionOperand** value_return,
                               InstructionOperand** shift_return) {
  ArmOperandGenerator g(selector);
  if (node->opcode() != IrOpcode::kWord32Shl) return false;
  Int32BinopMatcher m(node);
  *value_return = g.UseRegister(m.left().node());
  // LSL by immediate encodes shifts of 0-31; otherwise use the register form.
  if (m.right().IsInRange(0, 31)) {
    *opcode_return |= AddressingModeField::encode(kMode_Operand2_R_LSL_I);
    *shift_return = g.UseImmediate(m.right().node());
  } else {
    *opcode_return |= AddressingModeField::encode(kMode_Operand2_R_LSL_R);
    *shift_return = g.UseRegister(m.right().node());
  }
  return true;
}
176
177
// Tries to match |node| as a logical shift right usable as the flexible
// second operand.  See TryMatchROR for the contract.
static inline bool TryMatchLSR(InstructionSelector* selector,
                               InstructionCode* opcode_return, Node* node,
                               InstructionOperand** value_return,
                               InstructionOperand** shift_return) {
  ArmOperandGenerator g(selector);
  if (node->opcode() != IrOpcode::kWord32Shr) return false;
  Int32BinopMatcher m(node);
  *value_return = g.UseRegister(m.left().node());
  // LSR by immediate encodes shifts of 1-32; otherwise use the register form.
  if (m.right().IsInRange(1, 32)) {
    *opcode_return |= AddressingModeField::encode(kMode_Operand2_R_LSR_I);
    *shift_return = g.UseImmediate(m.right().node());
  } else {
    *opcode_return |= AddressingModeField::encode(kMode_Operand2_R_LSR_R);
    *shift_return = g.UseRegister(m.right().node());
  }
  return true;
}
195
196
197static inline bool TryMatchShift(InstructionSelector* selector,
198                                 InstructionCode* opcode_return, Node* node,
199                                 InstructionOperand** value_return,
200                                 InstructionOperand** shift_return) {
201  return (
202      TryMatchASR(selector, opcode_return, node, value_return, shift_return) ||
203      TryMatchLSL(selector, opcode_return, node, value_return, shift_return) ||
204      TryMatchLSR(selector, opcode_return, node, value_return, shift_return) ||
205      TryMatchROR(selector, opcode_return, node, value_return, shift_return));
206}
207
208
// Tries to use |node| as an immediate operand (1 input) or as a shifted
// register operand (2 inputs) for |*opcode_return|.  On success the chosen
// addressing mode is merged into *opcode_return, the operands are written to
// |inputs|, and *input_count_return holds how many were written.
static inline bool TryMatchImmediateOrShift(InstructionSelector* selector,
                                            InstructionCode* opcode_return,
                                            Node* node,
                                            size_t* input_count_return,
                                            InstructionOperand** inputs) {
  ArmOperandGenerator g(selector);
  if (g.CanBeImmediate(node, *opcode_return)) {
    *opcode_return |= AddressingModeField::encode(kMode_Operand2_I);
    inputs[0] = g.UseImmediate(node);
    *input_count_return = 1;
    return true;
  }
  if (TryMatchShift(selector, opcode_return, node, &inputs[0], &inputs[1])) {
    *input_count_return = 2;
    return true;
  }
  return false;
}
227
228
// Shared routine for binary operations with a flexible second operand.
// Tries to fold the right operand into the instruction as an immediate or
// shifted register; failing that, tries the left operand with
// |reverse_opcode| (the operand-swapped variant, e.g. SUB vs. RSB); as a
// last resort uses two plain registers.  |cont| adds branch/set-flags
// outputs for combined compare-and-branch style usage.
static void VisitBinop(InstructionSelector* selector, Node* node,
                       InstructionCode opcode, InstructionCode reverse_opcode,
                       FlagsContinuation* cont) {
  ArmOperandGenerator g(selector);
  Int32BinopMatcher m(node);
  // Worst case: shifted-register operand2 (2) + base register (1) +
  // two branch labels (2).
  InstructionOperand* inputs[5];
  size_t input_count = 0;
  InstructionOperand* outputs[2];
  size_t output_count = 0;

  if (TryMatchImmediateOrShift(selector, &opcode, m.right().node(),
                               &input_count, &inputs[1])) {
    inputs[0] = g.UseRegister(m.left().node());
    input_count++;
  } else if (TryMatchImmediateOrShift(selector, &reverse_opcode,
                                      m.left().node(), &input_count,
                                      &inputs[1])) {
    // Left operand folded: emit the operand-swapped opcode instead.
    inputs[0] = g.UseRegister(m.right().node());
    opcode = reverse_opcode;
    input_count++;
  } else {
    opcode |= AddressingModeField::encode(kMode_Operand2_R);
    inputs[input_count++] = g.UseRegister(m.left().node());
    inputs[input_count++] = g.UseRegister(m.right().node());
  }

  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  }

  outputs[output_count++] = g.DefineAsRegister(node);
  if (cont->IsSet()) {
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0, input_count);
  DCHECK_NE(0, output_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);
  DCHECK_NE(kMode_None, AddressingModeField::decode(opcode));

  Instruction* instr = selector->Emit(cont->Encode(opcode), output_count,
                                      outputs, input_count, inputs);
  if (cont->IsBranch()) instr->MarkAsControl();
}
275
276
// Convenience overload for binary operations without a flags continuation.
static void VisitBinop(InstructionSelector* selector, Node* node,
                       InstructionCode opcode, InstructionCode reverse_opcode) {
  FlagsContinuation cont;
  VisitBinop(selector, node, opcode, reverse_opcode, &cont);
}
282
283
// Selects a load instruction based on the load's representation and type,
// preferring an immediate offset when the index constant fits the opcode's
// offset range.
void InstructionSelector::VisitLoad(Node* node) {
  MachineType rep = RepresentationOf(OpParameter<LoadRepresentation>(node));
  MachineType typ = TypeOf(OpParameter<LoadRepresentation>(node));
  ArmOperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);

  ArchOpcode opcode;
  switch (rep) {
    case kRepFloat32:
      opcode = kArmVldr32;
      break;
    case kRepFloat64:
      opcode = kArmVldr64;
      break;
    case kRepBit:  // Fall through.
    case kRepWord8:
      // Unsigned loads zero-extend (LDRB), signed loads sign-extend (LDRSB).
      opcode = typ == kTypeUint32 ? kArmLdrb : kArmLdrsb;
      break;
    case kRepWord16:
      opcode = typ == kTypeUint32 ? kArmLdrh : kArmLdrsh;
      break;
    case kRepTagged:  // Fall through.
    case kRepWord32:
      opcode = kArmLdr;
      break;
    default:
      UNREACHABLE();
      return;
  }

  if (g.CanBeImmediate(index, opcode)) {
    Emit(opcode | AddressingModeField::encode(kMode_Offset_RI),
         g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(index));
  } else {
    Emit(opcode | AddressingModeField::encode(kMode_Offset_RR),
         g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(index));
  }
}
323
324
// Selects a store instruction based on the store's representation.  Stores
// of tagged values that need a full write barrier go through the dedicated
// write-barrier instruction with fixed registers.
void InstructionSelector::VisitStore(Node* node) {
  ArmOperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  StoreRepresentation store_rep = OpParameter<StoreRepresentation>(node);
  MachineType rep = RepresentationOf(store_rep.machine_type());
  if (store_rep.write_barrier_kind() == kFullWriteBarrier) {
    DCHECK(rep == kRepTagged);
    // TODO(dcarney): refactor RecordWrite function to take temp registers
    //                and pass them here instead of using fixed regs
    // TODO(dcarney): handle immediate indices.
    InstructionOperand* temps[] = {g.TempRegister(r5), g.TempRegister(r6)};
    Emit(kArmStoreWriteBarrier, NULL, g.UseFixed(base, r4),
         g.UseFixed(index, r5), g.UseFixed(value, r6), arraysize(temps),
         temps);
    return;
  }
  DCHECK_EQ(kNoWriteBarrier, store_rep.write_barrier_kind());

  ArchOpcode opcode;
  switch (rep) {
    case kRepFloat32:
      opcode = kArmVstr32;
      break;
    case kRepFloat64:
      opcode = kArmVstr64;
      break;
    case kRepBit:  // Fall through.
    case kRepWord8:
      opcode = kArmStrb;
      break;
    case kRepWord16:
      opcode = kArmStrh;
      break;
    case kRepTagged:  // Fall through.
    case kRepWord32:
      opcode = kArmStr;
      break;
    default:
      UNREACHABLE();
      return;
  }

  // Prefer an immediate offset when the index constant fits the opcode's
  // offset range.
  if (g.CanBeImmediate(index, opcode)) {
    Emit(opcode | AddressingModeField::encode(kMode_Offset_RI), NULL,
         g.UseRegister(base), g.UseImmediate(index), g.UseRegister(value));
  } else {
    Emit(opcode | AddressingModeField::encode(kMode_Offset_RR), NULL,
         g.UseRegister(base), g.UseRegister(index), g.UseRegister(value));
  }
}
378
379
// Emits a BIC (bit clear) instruction computing left & ~right, folding a
// shift of |right| into the second operand when possible.
static inline void EmitBic(InstructionSelector* selector, Node* node,
                           Node* left, Node* right) {
  ArmOperandGenerator g(selector);
  InstructionCode opcode = kArmBic;
  InstructionOperand* value_operand;
  InstructionOperand* shift_operand;
  if (TryMatchShift(selector, &opcode, right, &value_operand, &shift_operand)) {
    selector->Emit(opcode, g.DefineAsRegister(node), g.UseRegister(left),
                   value_operand, shift_operand);
    return;
  }
  selector->Emit(opcode | AddressingModeField::encode(kMode_Operand2_R),
                 g.DefineAsRegister(node), g.UseRegister(left),
                 g.UseRegister(right));
}
395
396
// Selects an instruction for a 32-bit AND.  Recognizes three special forms:
//   x & ~y           -> BIC
//   (x >> s) & mask  -> UBFX (ARMv7, mask is a low-bit run)
//   x & mask         -> BFC  (ARMv7, ~mask is a contiguous run)
// and otherwise falls back to a plain AND via VisitBinop.
void InstructionSelector::VisitWord32And(Node* node) {
  ArmOperandGenerator g(this);
  Int32BinopMatcher m(node);
  // (x ^ -1) & y == ~x & y == BIC(y, x), on either side of the AND.
  if (m.left().IsWord32Xor() && CanCover(node, m.left().node())) {
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().Is(-1)) {
      EmitBic(this, node, m.right().node(), mleft.left().node());
      return;
    }
  }
  if (m.right().IsWord32Xor() && CanCover(node, m.right().node())) {
    Int32BinopMatcher mright(m.right().node());
    if (mright.right().Is(-1)) {
      EmitBic(this, node, m.left().node(), mright.left().node());
      return;
    }
  }
  if (IsSupported(ARMv7) && m.right().HasValue()) {
    uint32_t value = m.right().Value();
    uint32_t width = base::bits::CountPopulation32(value);
    uint32_t msb = base::bits::CountLeadingZeros32(value);
    // msb + width == 32 means the mask is a contiguous run of set bits
    // starting at bit 0, i.e. an unsigned bitfield extract of |width| bits.
    if (width != 0 && msb + width == 32) {
      DCHECK_EQ(0, base::bits::CountTrailingZeros32(value));
      // If the masked value is itself a shift-right by a constant, fold the
      // shift into the UBFX's lsb field.
      if (m.left().IsWord32Shr()) {
        Int32BinopMatcher mleft(m.left().node());
        if (mleft.right().IsInRange(0, 31)) {
          Emit(kArmUbfx, g.DefineAsRegister(node),
               g.UseRegister(mleft.left().node()),
               g.UseImmediate(mleft.right().node()), g.TempImmediate(width));
          return;
        }
      }
      Emit(kArmUbfx, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
           g.TempImmediate(0), g.TempImmediate(width));
      return;
    }
    // Try to interpret this AND as BFC.
    width = 32 - width;  // Number of cleared bits.
    msb = base::bits::CountLeadingZeros32(~value);
    uint32_t lsb = base::bits::CountTrailingZeros32(~value);
    // msb + width + lsb == 32 means the cleared bits form one contiguous run.
    if (msb + width + lsb == 32) {
      Emit(kArmBfc, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()),
           g.TempImmediate(lsb), g.TempImmediate(width));
      return;
    }
  }
  VisitBinop(this, node, kArmAnd, kArmAnd);
}
445
446
// ORR is commutative, so the same opcode serves as its own reverse.
void InstructionSelector::VisitWord32Or(Node* node) {
  VisitBinop(this, node, kArmOrr, kArmOrr);
}
450
451
// Selects an instruction for a 32-bit XOR.  x ^ -1 is lowered to MVN
// (bitwise NOT), possibly folding a shift of x into the operand; other
// cases use EOR via VisitBinop.
void InstructionSelector::VisitWord32Xor(Node* node) {
  ArmOperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.right().Is(-1)) {
    InstructionCode opcode = kArmMvn;
    InstructionOperand* value_operand;
    InstructionOperand* shift_operand;
    if (TryMatchShift(this, &opcode, m.left().node(), &value_operand,
                      &shift_operand)) {
      Emit(opcode, g.DefineAsRegister(node), value_operand, shift_operand);
      return;
    }
    Emit(opcode | AddressingModeField::encode(kMode_Operand2_R),
         g.DefineAsRegister(node), g.UseRegister(m.left().node()));
    return;
  }
  VisitBinop(this, node, kArmEor, kArmEor);
}
470
471
// Shared routine for shift operations: a shift is emitted as a MOV whose
// second operand carries the shift.  |try_match_shift| is one of the
// TryMatch{ASR,LSL,LSR,ROR} helpers and must succeed for the node kind being
// visited.  |cont| adds branch/set-flags outputs.
template <typename TryMatchShift>
static inline void VisitShift(InstructionSelector* selector, Node* node,
                              TryMatchShift try_match_shift,
                              FlagsContinuation* cont) {
  ArmOperandGenerator g(selector);
  InstructionCode opcode = kArmMov;
  // Value + shift operands (2), plus up to two branch labels.
  InstructionOperand* inputs[4];
  size_t input_count = 2;
  InstructionOperand* outputs[2];
  size_t output_count = 0;

  // The caller dispatches by node opcode, so the matcher must not fail.
  CHECK(try_match_shift(selector, &opcode, node, &inputs[0], &inputs[1]));

  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  }

  outputs[output_count++] = g.DefineAsRegister(node);
  if (cont->IsSet()) {
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0, input_count);
  DCHECK_NE(0, output_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);
  DCHECK_NE(kMode_None, AddressingModeField::decode(opcode));

  Instruction* instr = selector->Emit(cont->Encode(opcode), output_count,
                                      outputs, input_count, inputs);
  if (cont->IsBranch()) instr->MarkAsControl();
}
505
506
// Convenience overload for shift operations without a flags continuation.
template <typename TryMatchShift>
static inline void VisitShift(InstructionSelector* selector, Node* node,
                              TryMatchShift try_match_shift) {
  FlagsContinuation cont;
  VisitShift(selector, node, try_match_shift, &cont);
}
513
514
void InstructionSelector::VisitWord32Shl(Node* node) {
  VisitShift(this, node, TryMatchLSL);
}
518
519
// Selects an instruction for a 32-bit logical shift right.  On ARMv7,
// (x & mask) >> lsb is turned into UBFX when the shifted mask is one
// contiguous run of bits; otherwise a plain LSR is emitted.
void InstructionSelector::VisitWord32Shr(Node* node) {
  ArmOperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (IsSupported(ARMv7) && m.left().IsWord32And() &&
      m.right().IsInRange(0, 31)) {
    int32_t lsb = m.right().Value();
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().HasValue()) {
      // Bits below |lsb| are shifted out, so clear them in the mask first.
      uint32_t value = (mleft.right().Value() >> lsb) << lsb;
      uint32_t width = base::bits::CountPopulation32(value);
      uint32_t msb = base::bits::CountLeadingZeros32(value);
      // One contiguous run of set bits starting at |lsb| => bitfield extract.
      if (msb + width + lsb == 32) {
        DCHECK_EQ(lsb, base::bits::CountTrailingZeros32(value));
        Emit(kArmUbfx, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()), g.TempImmediate(lsb),
             g.TempImmediate(width));
        return;
      }
    }
  }
  VisitShift(this, node, TryMatchLSR);
}
542
543
void InstructionSelector::VisitWord32Sar(Node* node) {
  VisitShift(this, node, TryMatchASR);
}
547
548
void InstructionSelector::VisitWord32Ror(Node* node) {
  VisitShift(this, node, TryMatchROR);
}
552
553
// Selects an instruction for a 32-bit add.  (a * b) + c on either side is
// fused into MLA (multiply-accumulate); otherwise falls back to ADD.
void InstructionSelector::VisitInt32Add(Node* node) {
  ArmOperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.left().IsInt32Mul() && CanCover(node, m.left().node())) {
    Int32BinopMatcher mleft(m.left().node());
    Emit(kArmMla, g.DefineAsRegister(node), g.UseRegister(mleft.left().node()),
         g.UseRegister(mleft.right().node()), g.UseRegister(m.right().node()));
    return;
  }
  if (m.right().IsInt32Mul() && CanCover(node, m.right().node())) {
    Int32BinopMatcher mright(m.right().node());
    Emit(kArmMla, g.DefineAsRegister(node), g.UseRegister(mright.left().node()),
         g.UseRegister(mright.right().node()), g.UseRegister(m.left().node()));
    return;
  }
  VisitBinop(this, node, kArmAdd, kArmAdd);
}
571
572
// Selects an instruction for a 32-bit subtract.  a - (b * c) is fused into
// MLS (multiply-subtract) when available; otherwise SUB, with RSB as the
// operand-swapped fallback for VisitBinop.
void InstructionSelector::VisitInt32Sub(Node* node) {
  ArmOperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (IsSupported(MLS) && m.right().IsInt32Mul() &&
      CanCover(node, m.right().node())) {
    Int32BinopMatcher mright(m.right().node());
    Emit(kArmMls, g.DefineAsRegister(node), g.UseRegister(mright.left().node()),
         g.UseRegister(mright.right().node()), g.UseRegister(m.left().node()));
    return;
  }
  VisitBinop(this, node, kArmSub, kArmRsb);
}
585
586
// Selects an instruction for a 32-bit multiply.  Multiplication by
// (2^k + 1) becomes x + (x << k) via ADD, and by (2^k - 1) becomes
// (x << k) - x via RSB; everything else uses MUL.
void InstructionSelector::VisitInt32Mul(Node* node) {
  ArmOperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.right().HasValue() && m.right().Value() > 0) {
    int32_t value = m.right().Value();
    if (base::bits::IsPowerOfTwo32(value - 1)) {
      // x * (2^k + 1) => x + (x << k)
      Emit(kArmAdd | AddressingModeField::encode(kMode_Operand2_R_LSL_I),
           g.DefineAsRegister(node), g.UseRegister(m.left().node()),
           g.UseRegister(m.left().node()),
           g.TempImmediate(WhichPowerOf2(value - 1)));
      return;
    }
    // The kMaxInt guard prevents overflow when computing value + 1.
    if (value < kMaxInt && base::bits::IsPowerOfTwo32(value + 1)) {
      // x * (2^k - 1) => (x << k) - x
      Emit(kArmRsb | AddressingModeField::encode(kMode_Operand2_R_LSL_I),
           g.DefineAsRegister(node), g.UseRegister(m.left().node()),
           g.UseRegister(m.left().node()),
           g.TempImmediate(WhichPowerOf2(value + 1)));
      return;
    }
  }
  Emit(kArmMul, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}
610
611
// Emits a 32-bit division.  Uses the hardware SDIV/UDIV when available
// (SUDIV); otherwise converts both operands to double, divides with VDIV,
// and truncates the result back to an integer.
static void EmitDiv(InstructionSelector* selector, ArchOpcode div_opcode,
                    ArchOpcode f64i32_opcode, ArchOpcode i32f64_opcode,
                    InstructionOperand* result_operand,
                    InstructionOperand* left_operand,
                    InstructionOperand* right_operand) {
  ArmOperandGenerator g(selector);
  if (selector->IsSupported(SUDIV)) {
    selector->Emit(div_opcode, result_operand, left_operand, right_operand);
    return;
  }
  InstructionOperand* left_double_operand = g.TempDoubleRegister();
  InstructionOperand* right_double_operand = g.TempDoubleRegister();
  InstructionOperand* result_double_operand = g.TempDoubleRegister();
  selector->Emit(f64i32_opcode, left_double_operand, left_operand);
  selector->Emit(f64i32_opcode, right_double_operand, right_operand);
  selector->Emit(kArmVdivF64, result_double_operand, left_double_operand,
                 right_double_operand);
  selector->Emit(i32f64_opcode, result_operand, result_double_operand);
}
631
632
// Shared routine for signed/unsigned division; the opcode triple selects
// the signedness (div instruction plus int<->double conversions).
static void VisitDiv(InstructionSelector* selector, Node* node,
                     ArchOpcode div_opcode, ArchOpcode f64i32_opcode,
                     ArchOpcode i32f64_opcode) {
  ArmOperandGenerator g(selector);
  Int32BinopMatcher m(node);
  EmitDiv(selector, div_opcode, f64i32_opcode, i32f64_opcode,
          g.DefineAsRegister(node), g.UseRegister(m.left().node()),
          g.UseRegister(m.right().node()));
}
642
643
void InstructionSelector::VisitInt32Div(Node* node) {
  VisitDiv(this, node, kArmSdiv, kArmVcvtF64S32, kArmVcvtS32F64);
}
647
648
void InstructionSelector::VisitInt32UDiv(Node* node) {
  VisitDiv(this, node, kArmUdiv, kArmVcvtF64U32, kArmVcvtU32F64);
}
652
653
// Shared routine for signed/unsigned modulus: computes left % right as
// left - (left / right) * right, using MLS when supported, otherwise a
// separate MUL + SUB.
static void VisitMod(InstructionSelector* selector, Node* node,
                     ArchOpcode div_opcode, ArchOpcode f64i32_opcode,
                     ArchOpcode i32f64_opcode) {
  ArmOperandGenerator g(selector);
  Int32BinopMatcher m(node);
  InstructionOperand* div_operand = g.TempRegister();
  InstructionOperand* result_operand = g.DefineAsRegister(node);
  InstructionOperand* left_operand = g.UseRegister(m.left().node());
  InstructionOperand* right_operand = g.UseRegister(m.right().node());
  EmitDiv(selector, div_opcode, f64i32_opcode, i32f64_opcode, div_operand,
          left_operand, right_operand);
  if (selector->IsSupported(MLS)) {
    // result = left - div * right, in a single instruction.
    selector->Emit(kArmMls, result_operand, div_operand, right_operand,
                   left_operand);
    return;
  }
  InstructionOperand* mul_operand = g.TempRegister();
  selector->Emit(kArmMul, mul_operand, div_operand, right_operand);
  selector->Emit(kArmSub, result_operand, left_operand, mul_operand);
}
674
675
void InstructionSelector::VisitInt32Mod(Node* node) {
  VisitMod(this, node, kArmSdiv, kArmVcvtF64S32, kArmVcvtS32F64);
}
679
680
void InstructionSelector::VisitInt32UMod(Node* node) {
  VisitMod(this, node, kArmUdiv, kArmVcvtF64U32, kArmVcvtU32F64);
}
684
685
// Signed int32 -> float64 conversion (VCVT.F64.S32).
void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
  ArmOperandGenerator g(this);
  Emit(kArmVcvtF64S32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}
691
692
// Unsigned int32 -> float64 conversion (VCVT.F64.U32).
void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
  ArmOperandGenerator g(this);
  Emit(kArmVcvtF64U32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}
698
699
// Float64 -> signed int32 conversion (VCVT.S32.F64).
void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
  ArmOperandGenerator g(this);
  Emit(kArmVcvtS32F64, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}
705
706
// Float64 -> unsigned int32 conversion (VCVT.U32.F64).
void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
  ArmOperandGenerator g(this);
  Emit(kArmVcvtU32F64, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}
712
713
714void InstructionSelector::VisitFloat64Add(Node* node) {
715  ArmOperandGenerator g(this);
716  Int32BinopMatcher m(node);
717  if (m.left().IsFloat64Mul() && CanCover(node, m.left().node())) {
718    Int32BinopMatcher mleft(m.left().node());
719    Emit(kArmVmlaF64, g.DefineSameAsFirst(node),
720         g.UseRegister(m.right().node()), g.UseRegister(mleft.left().node()),
721         g.UseRegister(mleft.right().node()));
722    return;
723  }
724  if (m.right().IsFloat64Mul() && CanCover(node, m.right().node())) {
725    Int32BinopMatcher mright(m.right().node());
726    Emit(kArmVmlaF64, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()),
727         g.UseRegister(mright.left().node()),
728         g.UseRegister(mright.right().node()));
729    return;
730  }
731  VisitRRRFloat64(this, kArmVaddF64, node);
732}
733
734
735void InstructionSelector::VisitFloat64Sub(Node* node) {
736  ArmOperandGenerator g(this);
737  Int32BinopMatcher m(node);
738  if (m.right().IsFloat64Mul() && CanCover(node, m.right().node())) {
739    Int32BinopMatcher mright(m.right().node());
740    Emit(kArmVmlsF64, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()),
741         g.UseRegister(mright.left().node()),
742         g.UseRegister(mright.right().node()));
743    return;
744  }
745  VisitRRRFloat64(this, kArmVsubF64, node);
746}
747
748
// Selects an instruction for a float64 multiply.  x * -1.0 becomes VNEG;
// everything else is a plain VMUL.
void InstructionSelector::VisitFloat64Mul(Node* node) {
  ArmOperandGenerator g(this);
  Float64BinopMatcher m(node);
  if (m.right().Is(-1.0)) {
    Emit(kArmVnegF64, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
  } else {
    VisitRRRFloat64(this, kArmVmulF64, node);
  }
}
758
759
void InstructionSelector::VisitFloat64Div(Node* node) {
  VisitRRRFloat64(this, kArmVdivF64, node);
}
763
764
// Float64 modulus has no ARM instruction; it is lowered to a runtime call
// with the standard d0/d1 argument registers, hence MarkAsCall.
void InstructionSelector::VisitFloat64Mod(Node* node) {
  ArmOperandGenerator g(this);
  Emit(kArmVmodF64, g.DefineAsFixed(node, d0), g.UseFixed(node->InputAt(0), d0),
       g.UseFixed(node->InputAt(1), d1))->MarkAsCall();
}
770
771
void InstructionSelector::VisitFloat64Sqrt(Node* node) {
  ArmOperandGenerator g(this);
  Emit(kArmVsqrtF64, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}
776
777
// Selects instructions for a call: pushes stack arguments, then emits the
// call with the opcode matching the call kind (code object vs. JS function).
// A non-NULL |deoptimization| marks the call as a control instruction.
void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation,
                                    BasicBlock* deoptimization) {
  ArmOperandGenerator g(this);
  CallDescriptor* descriptor = OpParameter<CallDescriptor*>(call);

  FrameStateDescriptor* frame_state_descriptor = NULL;
  if (descriptor->NeedsFrameState()) {
    frame_state_descriptor =
        GetFrameStateDescriptor(call->InputAt(descriptor->InputCount()));
  }

  CallBuffer buffer(zone(), descriptor, frame_state_descriptor);

  // Compute InstructionOperands for inputs and outputs.
  // TODO(turbofan): on ARM64 it's probably better to use the code object in a
  // register if there are multiple uses of it. Improve constant pool and the
  // heuristics in the register allocator for where to emit constants.
  InitializeCallBuffer(call, &buffer, true, false);

  // TODO(dcarney): might be possible to use claim/poke instead
  // Push any stack arguments.  Iterate in reverse so the first argument ends
  // up lowest on the stack.
  for (NodeVectorRIter input = buffer.pushed_nodes.rbegin();
       input != buffer.pushed_nodes.rend(); input++) {
    Emit(kArmPush, NULL, g.UseRegister(*input));
  }

  // Select the appropriate opcode based on the call type.
  InstructionCode opcode;
  switch (descriptor->kind()) {
    case CallDescriptor::kCallCodeObject: {
      opcode = kArchCallCodeObject;
      break;
    }
    case CallDescriptor::kCallJSFunction:
      opcode = kArchCallJSFunction;
      break;
    default:
      UNREACHABLE();
      return;
  }
  opcode |= MiscField::encode(descriptor->flags());

  // Emit the call instruction.
  Instruction* call_instr =
      Emit(opcode, buffer.outputs.size(), &buffer.outputs.front(),
           buffer.instruction_args.size(), &buffer.instruction_args.front());

  call_instr->MarkAsCall();
  if (deoptimization != NULL) {
    DCHECK(continuation != NULL);
    call_instr->MarkAsControl();
  }
}
831
832
// Add with overflow check: the flags continuation consumes the overflow flag.
void InstructionSelector::VisitInt32AddWithOverflow(Node* node,
                                                    FlagsContinuation* cont) {
  VisitBinop(this, node, kArmAdd, kArmAdd, cont);
}
837
838
// Subtract with overflow check; RSB is the operand-swapped fallback.
void InstructionSelector::VisitInt32SubWithOverflow(Node* node,
                                                    FlagsContinuation* cont) {
  VisitBinop(this, node, kArmSub, kArmRsb, cont);
}
843
844
// Shared routine for multiple compare operations.  Tries to fold either
// operand into the instruction as an immediate or shifted register; when the
// left operand is folded, the continuation's condition is commuted unless
// the comparison itself is commutative.
static void VisitWordCompare(InstructionSelector* selector, Node* node,
                             InstructionCode opcode, FlagsContinuation* cont,
                             bool commutative) {
  ArmOperandGenerator g(selector);
  Int32BinopMatcher m(node);
  InstructionOperand* inputs[5];
  size_t input_count = 0;
  InstructionOperand* outputs[1];
  size_t output_count = 0;

  if (TryMatchImmediateOrShift(selector, &opcode, m.right().node(),
                               &input_count, &inputs[1])) {
    inputs[0] = g.UseRegister(m.left().node());
    input_count++;
  } else if (TryMatchImmediateOrShift(selector, &opcode, m.left().node(),
                                      &input_count, &inputs[1])) {
    // Operands are swapped, so the condition must be commuted (e.g. < -> >)
    // unless the comparison is symmetric.
    if (!commutative) cont->Commute();
    inputs[0] = g.UseRegister(m.right().node());
    input_count++;
  } else {
    opcode |= AddressingModeField::encode(kMode_Operand2_R);
    inputs[input_count++] = g.UseRegister(m.left().node());
    inputs[input_count++] = g.UseRegister(m.right().node());
  }

  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  } else {
    DCHECK(cont->IsSet());
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0, input_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  Instruction* instr = selector->Emit(cont->Encode(opcode), output_count,
                                      outputs, input_count, inputs);
  if (cont->IsBranch()) instr->MarkAsControl();
}
887
888
// Tests a 32-bit value against zero.  When the value is itself a binop or
// shift, that operation is re-emitted in its flag-setting form so the test
// comes for free; otherwise a TST of the value against itself is emitted.
void InstructionSelector::VisitWord32Test(Node* node, FlagsContinuation* cont) {
  switch (node->opcode()) {
    case IrOpcode::kInt32Add:
      // a + b == 0  <=>  CMN a, b sets Z.
      return VisitWordCompare(this, node, kArmCmn, cont, true);
    case IrOpcode::kInt32Sub:
      // a - b == 0  <=>  CMP a, b sets Z.
      return VisitWordCompare(this, node, kArmCmp, cont, false);
    case IrOpcode::kWord32And:
      // a & b == 0  <=>  TST a, b sets Z.
      return VisitWordCompare(this, node, kArmTst, cont, true);
    case IrOpcode::kWord32Or:
      return VisitBinop(this, node, kArmOrr, kArmOrr, cont);
    case IrOpcode::kWord32Xor:
      // a ^ b == 0  <=>  TEQ a, b sets Z.
      return VisitWordCompare(this, node, kArmTeq, cont, true);
    case IrOpcode::kWord32Sar:
      return VisitShift(this, node, TryMatchASR, cont);
    case IrOpcode::kWord32Shl:
      return VisitShift(this, node, TryMatchLSL, cont);
    case IrOpcode::kWord32Shr:
      return VisitShift(this, node, TryMatchLSR, cont);
    case IrOpcode::kWord32Ror:
      return VisitShift(this, node, TryMatchROR, cont);
    default:
      break;
  }

  // Fallback: TST the value against itself to set the flags.
  ArmOperandGenerator g(this);
  InstructionCode opcode =
      cont->Encode(kArmTst) | AddressingModeField::encode(kMode_Operand2_R);
  if (cont->IsBranch()) {
    Emit(opcode, NULL, g.UseRegister(node), g.UseRegister(node),
         g.Label(cont->true_block()),
         g.Label(cont->false_block()))->MarkAsControl();
  } else {
    Emit(opcode, g.DefineAsRegister(cont->result()), g.UseRegister(node),
         g.UseRegister(node));
  }
}
925
926
// 32-bit comparison: CMP is not commutative, so operand swaps must commute
// the continuation's condition.
void InstructionSelector::VisitWord32Compare(Node* node,
                                             FlagsContinuation* cont) {
  VisitWordCompare(this, node, kArmCmp, cont, false);
}
931
932
// Float64 comparison via VCMP; the continuation either branches on the
// resulting flags or materializes them into a register.
void InstructionSelector::VisitFloat64Compare(Node* node,
                                              FlagsContinuation* cont) {
  ArmOperandGenerator g(this);
  Float64BinopMatcher m(node);
  if (cont->IsBranch()) {
    Emit(cont->Encode(kArmVcmpF64), NULL, g.UseRegister(m.left().node()),
         g.UseRegister(m.right().node()), g.Label(cont->true_block()),
         g.Label(cont->false_block()))->MarkAsControl();
  } else {
    DCHECK(cont->IsSet());
    Emit(cont->Encode(kArmVcmpF64), g.DefineAsRegister(cont->result()),
         g.UseRegister(m.left().node()), g.UseRegister(m.right().node()));
  }
}
947
}  // namespace compiler
}  // namespace internal
}  // namespace v8
951