1// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/base/adapters.h"
6#include "src/compiler/instruction-selector-impl.h"
7#include "src/compiler/node-matchers.h"
8#include "src/compiler/node-properties.h"
9#include "src/ppc/frames-ppc.h"
10
11namespace v8 {
12namespace internal {
13namespace compiler {
14
15enum ImmediateMode {
16  kInt16Imm,
17  kInt16Imm_Unsigned,
18  kInt16Imm_Negate,
19  kInt16Imm_4ByteAligned,
20  kShift32Imm,
21  kShift64Imm,
22  kNoImmediate
23};
24
25
26// Adds PPC-specific methods for generating operands.
27class PPCOperandGenerator final : public OperandGenerator {
28 public:
29  explicit PPCOperandGenerator(InstructionSelector* selector)
30      : OperandGenerator(selector) {}
31
32  InstructionOperand UseOperand(Node* node, ImmediateMode mode) {
33    if (CanBeImmediate(node, mode)) {
34      return UseImmediate(node);
35    }
36    return UseRegister(node);
37  }
38
39  bool CanBeImmediate(Node* node, ImmediateMode mode) {
40    int64_t value;
41    if (node->opcode() == IrOpcode::kInt32Constant)
42      value = OpParameter<int32_t>(node);
43    else if (node->opcode() == IrOpcode::kInt64Constant)
44      value = OpParameter<int64_t>(node);
45    else
46      return false;
47    return CanBeImmediate(value, mode);
48  }
49
50  bool CanBeImmediate(int64_t value, ImmediateMode mode) {
51    switch (mode) {
52      case kInt16Imm:
53        return is_int16(value);
54      case kInt16Imm_Unsigned:
55        return is_uint16(value);
56      case kInt16Imm_Negate:
57        return is_int16(-value);
58      case kInt16Imm_4ByteAligned:
59        return is_int16(value) && !(value & 3);
60      case kShift32Imm:
61        return 0 <= value && value < 32;
62      case kShift64Imm:
63        return 0 <= value && value < 64;
64      case kNoImmediate:
65        return false;
66    }
67    return false;
68  }
69};
70
71
72namespace {
73
74void VisitRR(InstructionSelector* selector, InstructionCode opcode,
75             Node* node) {
76  PPCOperandGenerator g(selector);
77  selector->Emit(opcode, g.DefineAsRegister(node),
78                 g.UseRegister(node->InputAt(0)));
79}
80
81void VisitRRR(InstructionSelector* selector, InstructionCode opcode,
82              Node* node) {
83  PPCOperandGenerator g(selector);
84  selector->Emit(opcode, g.DefineAsRegister(node),
85                 g.UseRegister(node->InputAt(0)),
86                 g.UseRegister(node->InputAt(1)));
87}
88
89void VisitRRO(InstructionSelector* selector, InstructionCode opcode, Node* node,
90              ImmediateMode operand_mode) {
91  PPCOperandGenerator g(selector);
92  selector->Emit(opcode, g.DefineAsRegister(node),
93                 g.UseRegister(node->InputAt(0)),
94                 g.UseOperand(node->InputAt(1), operand_mode));
95}
96
97
98#if V8_TARGET_ARCH_PPC64
99void VisitTryTruncateDouble(InstructionSelector* selector,
100                            InstructionCode opcode, Node* node) {
101  PPCOperandGenerator g(selector);
102  InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
103  InstructionOperand outputs[2];
104  size_t output_count = 0;
105  outputs[output_count++] = g.DefineAsRegister(node);
106
107  Node* success_output = NodeProperties::FindProjection(node, 1);
108  if (success_output) {
109    outputs[output_count++] = g.DefineAsRegister(success_output);
110  }
111
112  selector->Emit(opcode, output_count, outputs, 1, inputs);
113}
114#endif
115
116
117// Shared routine for multiple binary operations.
118template <typename Matcher>
119void VisitBinop(InstructionSelector* selector, Node* node,
120                InstructionCode opcode, ImmediateMode operand_mode,
121                FlagsContinuation* cont) {
122  PPCOperandGenerator g(selector);
123  Matcher m(node);
124  InstructionOperand inputs[4];
125  size_t input_count = 0;
126  InstructionOperand outputs[2];
127  size_t output_count = 0;
128
129  inputs[input_count++] = g.UseRegister(m.left().node());
130  inputs[input_count++] = g.UseOperand(m.right().node(), operand_mode);
131
132  if (cont->IsBranch()) {
133    inputs[input_count++] = g.Label(cont->true_block());
134    inputs[input_count++] = g.Label(cont->false_block());
135  }
136
137  outputs[output_count++] = g.DefineAsRegister(node);
138  if (cont->IsSet()) {
139    outputs[output_count++] = g.DefineAsRegister(cont->result());
140  }
141
142  DCHECK_NE(0u, input_count);
143  DCHECK_NE(0u, output_count);
144  DCHECK_GE(arraysize(inputs), input_count);
145  DCHECK_GE(arraysize(outputs), output_count);
146
147  opcode = cont->Encode(opcode);
148  if (cont->IsDeoptimize()) {
149    selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
150                             cont->frame_state());
151  } else {
152    selector->Emit(opcode, output_count, outputs, input_count, inputs);
153  }
154}
155
156
157// Shared routine for multiple binary operations.
158template <typename Matcher>
159void VisitBinop(InstructionSelector* selector, Node* node,
160                InstructionCode opcode, ImmediateMode operand_mode) {
161  FlagsContinuation cont;
162  VisitBinop<Matcher>(selector, node, opcode, operand_mode, &cont);
163}
164
165}  // namespace
166
167
168void InstructionSelector::VisitLoad(Node* node) {
169  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
170  PPCOperandGenerator g(this);
171  Node* base = node->InputAt(0);
172  Node* offset = node->InputAt(1);
173  ArchOpcode opcode = kArchNop;
174  ImmediateMode mode = kInt16Imm;
175  switch (load_rep.representation()) {
176    case MachineRepresentation::kFloat32:
177      opcode = kPPC_LoadFloat32;
178      break;
179    case MachineRepresentation::kFloat64:
180      opcode = kPPC_LoadDouble;
181      break;
182    case MachineRepresentation::kBit:  // Fall through.
183    case MachineRepresentation::kWord8:
184      opcode = load_rep.IsSigned() ? kPPC_LoadWordS8 : kPPC_LoadWordU8;
185      break;
186    case MachineRepresentation::kWord16:
187      opcode = load_rep.IsSigned() ? kPPC_LoadWordS16 : kPPC_LoadWordU16;
188      break;
189#if !V8_TARGET_ARCH_PPC64
190    case MachineRepresentation::kTagged:  // Fall through.
191#endif
192    case MachineRepresentation::kWord32:
193      opcode = kPPC_LoadWordU32;
194      break;
195#if V8_TARGET_ARCH_PPC64
196    case MachineRepresentation::kTagged:  // Fall through.
197    case MachineRepresentation::kWord64:
198      opcode = kPPC_LoadWord64;
199      mode = kInt16Imm_4ByteAligned;
200      break;
201#else
202    case MachineRepresentation::kWord64:  // Fall through.
203#endif
204    case MachineRepresentation::kSimd128:  // Fall through.
205    case MachineRepresentation::kNone:
206      UNREACHABLE();
207      return;
208  }
209  if (g.CanBeImmediate(offset, mode)) {
210    Emit(opcode | AddressingModeField::encode(kMode_MRI),
211         g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(offset));
212  } else if (g.CanBeImmediate(base, mode)) {
213    Emit(opcode | AddressingModeField::encode(kMode_MRI),
214         g.DefineAsRegister(node), g.UseRegister(offset), g.UseImmediate(base));
215  } else {
216    Emit(opcode | AddressingModeField::encode(kMode_MRR),
217         g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(offset));
218  }
219}
220
221
222void InstructionSelector::VisitStore(Node* node) {
223  PPCOperandGenerator g(this);
224  Node* base = node->InputAt(0);
225  Node* offset = node->InputAt(1);
226  Node* value = node->InputAt(2);
227
228  StoreRepresentation store_rep = StoreRepresentationOf(node->op());
229  WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
230  MachineRepresentation rep = store_rep.representation();
231
232  if (write_barrier_kind != kNoWriteBarrier) {
233    DCHECK_EQ(MachineRepresentation::kTagged, rep);
234    AddressingMode addressing_mode;
235    InstructionOperand inputs[3];
236    size_t input_count = 0;
237    inputs[input_count++] = g.UseUniqueRegister(base);
238    // OutOfLineRecordWrite uses the offset in an 'add' instruction as well as
239    // for the store itself, so we must check compatibility with both.
240    if (g.CanBeImmediate(offset, kInt16Imm)
241#if V8_TARGET_ARCH_PPC64
242        && g.CanBeImmediate(offset, kInt16Imm_4ByteAligned)
243#endif
244            ) {
245      inputs[input_count++] = g.UseImmediate(offset);
246      addressing_mode = kMode_MRI;
247    } else {
248      inputs[input_count++] = g.UseUniqueRegister(offset);
249      addressing_mode = kMode_MRR;
250    }
251    inputs[input_count++] = g.UseUniqueRegister(value);
252    RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
253    switch (write_barrier_kind) {
254      case kNoWriteBarrier:
255        UNREACHABLE();
256        break;
257      case kMapWriteBarrier:
258        record_write_mode = RecordWriteMode::kValueIsMap;
259        break;
260      case kPointerWriteBarrier:
261        record_write_mode = RecordWriteMode::kValueIsPointer;
262        break;
263      case kFullWriteBarrier:
264        record_write_mode = RecordWriteMode::kValueIsAny;
265        break;
266    }
267    InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
268    size_t const temp_count = arraysize(temps);
269    InstructionCode code = kArchStoreWithWriteBarrier;
270    code |= AddressingModeField::encode(addressing_mode);
271    code |= MiscField::encode(static_cast<int>(record_write_mode));
272    Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
273  } else {
274    ArchOpcode opcode = kArchNop;
275    ImmediateMode mode = kInt16Imm;
276    switch (rep) {
277      case MachineRepresentation::kFloat32:
278        opcode = kPPC_StoreFloat32;
279        break;
280      case MachineRepresentation::kFloat64:
281        opcode = kPPC_StoreDouble;
282        break;
283      case MachineRepresentation::kBit:  // Fall through.
284      case MachineRepresentation::kWord8:
285        opcode = kPPC_StoreWord8;
286        break;
287      case MachineRepresentation::kWord16:
288        opcode = kPPC_StoreWord16;
289        break;
290#if !V8_TARGET_ARCH_PPC64
291      case MachineRepresentation::kTagged:  // Fall through.
292#endif
293      case MachineRepresentation::kWord32:
294        opcode = kPPC_StoreWord32;
295        break;
296#if V8_TARGET_ARCH_PPC64
297      case MachineRepresentation::kTagged:  // Fall through.
298      case MachineRepresentation::kWord64:
299        opcode = kPPC_StoreWord64;
300        mode = kInt16Imm_4ByteAligned;
301        break;
302#else
303      case MachineRepresentation::kWord64:  // Fall through.
304#endif
305      case MachineRepresentation::kSimd128:  // Fall through.
306      case MachineRepresentation::kNone:
307        UNREACHABLE();
308        return;
309    }
310    if (g.CanBeImmediate(offset, mode)) {
311      Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
312           g.UseRegister(base), g.UseImmediate(offset), g.UseRegister(value));
313    } else if (g.CanBeImmediate(base, mode)) {
314      Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
315           g.UseRegister(offset), g.UseImmediate(base), g.UseRegister(value));
316    } else {
317      Emit(opcode | AddressingModeField::encode(kMode_MRR), g.NoOutput(),
318           g.UseRegister(base), g.UseRegister(offset), g.UseRegister(value));
319    }
320  }
321}
322
323
324void InstructionSelector::VisitCheckedLoad(Node* node) {
325  CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
326  PPCOperandGenerator g(this);
327  Node* const base = node->InputAt(0);
328  Node* const offset = node->InputAt(1);
329  Node* const length = node->InputAt(2);
330  ArchOpcode opcode = kArchNop;
331  switch (load_rep.representation()) {
332    case MachineRepresentation::kWord8:
333      opcode = load_rep.IsSigned() ? kCheckedLoadInt8 : kCheckedLoadUint8;
334      break;
335    case MachineRepresentation::kWord16:
336      opcode = load_rep.IsSigned() ? kCheckedLoadInt16 : kCheckedLoadUint16;
337      break;
338    case MachineRepresentation::kWord32:
339      opcode = kCheckedLoadWord32;
340      break;
341#if V8_TARGET_ARCH_PPC64
342    case MachineRepresentation::kWord64:
343      opcode = kCheckedLoadWord64;
344      break;
345#endif
346    case MachineRepresentation::kFloat32:
347      opcode = kCheckedLoadFloat32;
348      break;
349    case MachineRepresentation::kFloat64:
350      opcode = kCheckedLoadFloat64;
351      break;
352    case MachineRepresentation::kBit:     // Fall through.
353    case MachineRepresentation::kTagged:  // Fall through.
354#if !V8_TARGET_ARCH_PPC64
355    case MachineRepresentation::kWord64:  // Fall through.
356#endif
357    case MachineRepresentation::kSimd128:  // Fall through.
358    case MachineRepresentation::kNone:
359      UNREACHABLE();
360      return;
361  }
362  AddressingMode addressingMode = kMode_MRR;
363  Emit(opcode | AddressingModeField::encode(addressingMode),
364       g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(offset),
365       g.UseOperand(length, kInt16Imm_Unsigned));
366}
367
368
369void InstructionSelector::VisitCheckedStore(Node* node) {
370  MachineRepresentation rep = CheckedStoreRepresentationOf(node->op());
371  PPCOperandGenerator g(this);
372  Node* const base = node->InputAt(0);
373  Node* const offset = node->InputAt(1);
374  Node* const length = node->InputAt(2);
375  Node* const value = node->InputAt(3);
376  ArchOpcode opcode = kArchNop;
377  switch (rep) {
378    case MachineRepresentation::kWord8:
379      opcode = kCheckedStoreWord8;
380      break;
381    case MachineRepresentation::kWord16:
382      opcode = kCheckedStoreWord16;
383      break;
384    case MachineRepresentation::kWord32:
385      opcode = kCheckedStoreWord32;
386      break;
387#if V8_TARGET_ARCH_PPC64
388    case MachineRepresentation::kWord64:
389      opcode = kCheckedStoreWord64;
390      break;
391#endif
392    case MachineRepresentation::kFloat32:
393      opcode = kCheckedStoreFloat32;
394      break;
395    case MachineRepresentation::kFloat64:
396      opcode = kCheckedStoreFloat64;
397      break;
398    case MachineRepresentation::kBit:     // Fall through.
399    case MachineRepresentation::kTagged:  // Fall through.
400#if !V8_TARGET_ARCH_PPC64
401    case MachineRepresentation::kWord64:  // Fall through.
402#endif
403    case MachineRepresentation::kSimd128:  // Fall through.
404    case MachineRepresentation::kNone:
405      UNREACHABLE();
406      return;
407  }
408  AddressingMode addressingMode = kMode_MRR;
409  Emit(opcode | AddressingModeField::encode(addressingMode), g.NoOutput(),
410       g.UseRegister(base), g.UseRegister(offset),
411       g.UseOperand(length, kInt16Imm_Unsigned), g.UseRegister(value));
412}
413
414
415template <typename Matcher>
416static void VisitLogical(InstructionSelector* selector, Node* node, Matcher* m,
417                         ArchOpcode opcode, bool left_can_cover,
418                         bool right_can_cover, ImmediateMode imm_mode) {
419  PPCOperandGenerator g(selector);
420
421  // Map instruction to equivalent operation with inverted right input.
422  ArchOpcode inv_opcode = opcode;
423  switch (opcode) {
424    case kPPC_And:
425      inv_opcode = kPPC_AndComplement;
426      break;
427    case kPPC_Or:
428      inv_opcode = kPPC_OrComplement;
429      break;
430    default:
431      UNREACHABLE();
432  }
433
434  // Select Logical(y, ~x) for Logical(Xor(x, -1), y).
435  if ((m->left().IsWord32Xor() || m->left().IsWord64Xor()) && left_can_cover) {
436    Matcher mleft(m->left().node());
437    if (mleft.right().Is(-1)) {
438      selector->Emit(inv_opcode, g.DefineAsRegister(node),
439                     g.UseRegister(m->right().node()),
440                     g.UseRegister(mleft.left().node()));
441      return;
442    }
443  }
444
445  // Select Logical(x, ~y) for Logical(x, Xor(y, -1)).
446  if ((m->right().IsWord32Xor() || m->right().IsWord64Xor()) &&
447      right_can_cover) {
448    Matcher mright(m->right().node());
449    if (mright.right().Is(-1)) {
450      // TODO(all): support shifted operand on right.
451      selector->Emit(inv_opcode, g.DefineAsRegister(node),
452                     g.UseRegister(m->left().node()),
453                     g.UseRegister(mright.left().node()));
454      return;
455    }
456  }
457
458  VisitBinop<Matcher>(selector, node, opcode, imm_mode);
459}
460
461
462static inline bool IsContiguousMask32(uint32_t value, int* mb, int* me) {
463  int mask_width = base::bits::CountPopulation32(value);
464  int mask_msb = base::bits::CountLeadingZeros32(value);
465  int mask_lsb = base::bits::CountTrailingZeros32(value);
466  if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 32))
467    return false;
468  *mb = mask_lsb + mask_width - 1;
469  *me = mask_lsb;
470  return true;
471}
472
473
474#if V8_TARGET_ARCH_PPC64
475static inline bool IsContiguousMask64(uint64_t value, int* mb, int* me) {
476  int mask_width = base::bits::CountPopulation64(value);
477  int mask_msb = base::bits::CountLeadingZeros64(value);
478  int mask_lsb = base::bits::CountTrailingZeros64(value);
479  if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 64))
480    return false;
481  *mb = mask_lsb + mask_width - 1;
482  *me = mask_lsb;
483  return true;
484}
485#endif
486
487
488// TODO(mbrandy): Absorb rotate-right into rlwinm?
489void InstructionSelector::VisitWord32And(Node* node) {
490  PPCOperandGenerator g(this);
491  Int32BinopMatcher m(node);
492  int mb = 0;
493  int me = 0;
494  if (m.right().HasValue() && IsContiguousMask32(m.right().Value(), &mb, &me)) {
495    int sh = 0;
496    Node* left = m.left().node();
497    if ((m.left().IsWord32Shr() || m.left().IsWord32Shl()) &&
498        CanCover(node, left)) {
499      // Try to absorb left/right shift into rlwinm
500      Int32BinopMatcher mleft(m.left().node());
501      if (mleft.right().IsInRange(0, 31)) {
502        left = mleft.left().node();
503        sh = mleft.right().Value();
504        if (m.left().IsWord32Shr()) {
505          // Adjust the mask such that it doesn't include any rotated bits.
506          if (mb > 31 - sh) mb = 31 - sh;
507          sh = (32 - sh) & 0x1f;
508        } else {
509          // Adjust the mask such that it doesn't include any rotated bits.
510          if (me < sh) me = sh;
511        }
512      }
513    }
514    if (mb >= me) {
515      Emit(kPPC_RotLeftAndMask32, g.DefineAsRegister(node), g.UseRegister(left),
516           g.TempImmediate(sh), g.TempImmediate(mb), g.TempImmediate(me));
517      return;
518    }
519  }
520  VisitLogical<Int32BinopMatcher>(
521      this, node, &m, kPPC_And, CanCover(node, m.left().node()),
522      CanCover(node, m.right().node()), kInt16Imm_Unsigned);
523}
524
525
526#if V8_TARGET_ARCH_PPC64
527// TODO(mbrandy): Absorb rotate-right into rldic?
528void InstructionSelector::VisitWord64And(Node* node) {
529  PPCOperandGenerator g(this);
530  Int64BinopMatcher m(node);
531  int mb = 0;
532  int me = 0;
533  if (m.right().HasValue() && IsContiguousMask64(m.right().Value(), &mb, &me)) {
534    int sh = 0;
535    Node* left = m.left().node();
536    if ((m.left().IsWord64Shr() || m.left().IsWord64Shl()) &&
537        CanCover(node, left)) {
538      // Try to absorb left/right shift into rldic
539      Int64BinopMatcher mleft(m.left().node());
540      if (mleft.right().IsInRange(0, 63)) {
541        left = mleft.left().node();
542        sh = mleft.right().Value();
543        if (m.left().IsWord64Shr()) {
544          // Adjust the mask such that it doesn't include any rotated bits.
545          if (mb > 63 - sh) mb = 63 - sh;
546          sh = (64 - sh) & 0x3f;
547        } else {
548          // Adjust the mask such that it doesn't include any rotated bits.
549          if (me < sh) me = sh;
550        }
551      }
552    }
553    if (mb >= me) {
554      bool match = false;
555      ArchOpcode opcode;
556      int mask;
557      if (me == 0) {
558        match = true;
559        opcode = kPPC_RotLeftAndClearLeft64;
560        mask = mb;
561      } else if (mb == 63) {
562        match = true;
563        opcode = kPPC_RotLeftAndClearRight64;
564        mask = me;
565      } else if (sh && me <= sh && m.left().IsWord64Shl()) {
566        match = true;
567        opcode = kPPC_RotLeftAndClear64;
568        mask = mb;
569      }
570      if (match) {
571        Emit(opcode, g.DefineAsRegister(node), g.UseRegister(left),
572             g.TempImmediate(sh), g.TempImmediate(mask));
573        return;
574      }
575    }
576  }
577  VisitLogical<Int64BinopMatcher>(
578      this, node, &m, kPPC_And, CanCover(node, m.left().node()),
579      CanCover(node, m.right().node()), kInt16Imm_Unsigned);
580}
581#endif
582
583
584void InstructionSelector::VisitWord32Or(Node* node) {
585  Int32BinopMatcher m(node);
586  VisitLogical<Int32BinopMatcher>(
587      this, node, &m, kPPC_Or, CanCover(node, m.left().node()),
588      CanCover(node, m.right().node()), kInt16Imm_Unsigned);
589}
590
591
592#if V8_TARGET_ARCH_PPC64
593void InstructionSelector::VisitWord64Or(Node* node) {
594  Int64BinopMatcher m(node);
595  VisitLogical<Int64BinopMatcher>(
596      this, node, &m, kPPC_Or, CanCover(node, m.left().node()),
597      CanCover(node, m.right().node()), kInt16Imm_Unsigned);
598}
599#endif
600
601
602void InstructionSelector::VisitWord32Xor(Node* node) {
603  PPCOperandGenerator g(this);
604  Int32BinopMatcher m(node);
605  if (m.right().Is(-1)) {
606    Emit(kPPC_Not, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
607  } else {
608    VisitBinop<Int32BinopMatcher>(this, node, kPPC_Xor, kInt16Imm_Unsigned);
609  }
610}
611
612
613#if V8_TARGET_ARCH_PPC64
614void InstructionSelector::VisitWord64Xor(Node* node) {
615  PPCOperandGenerator g(this);
616  Int64BinopMatcher m(node);
617  if (m.right().Is(-1)) {
618    Emit(kPPC_Not, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
619  } else {
620    VisitBinop<Int64BinopMatcher>(this, node, kPPC_Xor, kInt16Imm_Unsigned);
621  }
622}
623#endif
624
625
626void InstructionSelector::VisitWord32Shl(Node* node) {
627  PPCOperandGenerator g(this);
628  Int32BinopMatcher m(node);
629  if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
630    // Try to absorb logical-and into rlwinm
631    Int32BinopMatcher mleft(m.left().node());
632    int sh = m.right().Value();
633    int mb;
634    int me;
635    if (mleft.right().HasValue() &&
636        IsContiguousMask32(mleft.right().Value() << sh, &mb, &me)) {
637      // Adjust the mask such that it doesn't include any rotated bits.
638      if (me < sh) me = sh;
639      if (mb >= me) {
640        Emit(kPPC_RotLeftAndMask32, g.DefineAsRegister(node),
641             g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
642             g.TempImmediate(mb), g.TempImmediate(me));
643        return;
644      }
645    }
646  }
647  VisitRRO(this, kPPC_ShiftLeft32, node, kShift32Imm);
648}
649
650
651#if V8_TARGET_ARCH_PPC64
652void InstructionSelector::VisitWord64Shl(Node* node) {
653  PPCOperandGenerator g(this);
654  Int64BinopMatcher m(node);
655  // TODO(mbrandy): eliminate left sign extension if right >= 32
656  if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
657    // Try to absorb logical-and into rldic
658    Int64BinopMatcher mleft(m.left().node());
659    int sh = m.right().Value();
660    int mb;
661    int me;
662    if (mleft.right().HasValue() &&
663        IsContiguousMask64(mleft.right().Value() << sh, &mb, &me)) {
664      // Adjust the mask such that it doesn't include any rotated bits.
665      if (me < sh) me = sh;
666      if (mb >= me) {
667        bool match = false;
668        ArchOpcode opcode;
669        int mask;
670        if (me == 0) {
671          match = true;
672          opcode = kPPC_RotLeftAndClearLeft64;
673          mask = mb;
674        } else if (mb == 63) {
675          match = true;
676          opcode = kPPC_RotLeftAndClearRight64;
677          mask = me;
678        } else if (sh && me <= sh) {
679          match = true;
680          opcode = kPPC_RotLeftAndClear64;
681          mask = mb;
682        }
683        if (match) {
684          Emit(opcode, g.DefineAsRegister(node),
685               g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
686               g.TempImmediate(mask));
687          return;
688        }
689      }
690    }
691  }
692  VisitRRO(this, kPPC_ShiftLeft64, node, kShift64Imm);
693}
694#endif
695
696
697void InstructionSelector::VisitWord32Shr(Node* node) {
698  PPCOperandGenerator g(this);
699  Int32BinopMatcher m(node);
700  if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
701    // Try to absorb logical-and into rlwinm
702    Int32BinopMatcher mleft(m.left().node());
703    int sh = m.right().Value();
704    int mb;
705    int me;
706    if (mleft.right().HasValue() &&
707        IsContiguousMask32((uint32_t)(mleft.right().Value()) >> sh, &mb, &me)) {
708      // Adjust the mask such that it doesn't include any rotated bits.
709      if (mb > 31 - sh) mb = 31 - sh;
710      sh = (32 - sh) & 0x1f;
711      if (mb >= me) {
712        Emit(kPPC_RotLeftAndMask32, g.DefineAsRegister(node),
713             g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
714             g.TempImmediate(mb), g.TempImmediate(me));
715        return;
716      }
717    }
718  }
719  VisitRRO(this, kPPC_ShiftRight32, node, kShift32Imm);
720}
721
722#if V8_TARGET_ARCH_PPC64
723void InstructionSelector::VisitWord64Shr(Node* node) {
724  PPCOperandGenerator g(this);
725  Int64BinopMatcher m(node);
726  if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
727    // Try to absorb logical-and into rldic
728    Int64BinopMatcher mleft(m.left().node());
729    int sh = m.right().Value();
730    int mb;
731    int me;
732    if (mleft.right().HasValue() &&
733        IsContiguousMask64((uint64_t)(mleft.right().Value()) >> sh, &mb, &me)) {
734      // Adjust the mask such that it doesn't include any rotated bits.
735      if (mb > 63 - sh) mb = 63 - sh;
736      sh = (64 - sh) & 0x3f;
737      if (mb >= me) {
738        bool match = false;
739        ArchOpcode opcode;
740        int mask;
741        if (me == 0) {
742          match = true;
743          opcode = kPPC_RotLeftAndClearLeft64;
744          mask = mb;
745        } else if (mb == 63) {
746          match = true;
747          opcode = kPPC_RotLeftAndClearRight64;
748          mask = me;
749        }
750        if (match) {
751          Emit(opcode, g.DefineAsRegister(node),
752               g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
753               g.TempImmediate(mask));
754          return;
755        }
756      }
757    }
758  }
759  VisitRRO(this, kPPC_ShiftRight64, node, kShift64Imm);
760}
761#endif
762
763
764void InstructionSelector::VisitWord32Sar(Node* node) {
765  PPCOperandGenerator g(this);
766  Int32BinopMatcher m(node);
767  // Replace with sign extension for (x << K) >> K where K is 16 or 24.
768  if (CanCover(node, m.left().node()) && m.left().IsWord32Shl()) {
769    Int32BinopMatcher mleft(m.left().node());
770    if (mleft.right().Is(16) && m.right().Is(16)) {
771      Emit(kPPC_ExtendSignWord16, g.DefineAsRegister(node),
772           g.UseRegister(mleft.left().node()));
773      return;
774    } else if (mleft.right().Is(24) && m.right().Is(24)) {
775      Emit(kPPC_ExtendSignWord8, g.DefineAsRegister(node),
776           g.UseRegister(mleft.left().node()));
777      return;
778    }
779  }
780  VisitRRO(this, kPPC_ShiftRightAlg32, node, kShift32Imm);
781}
782
783#if !V8_TARGET_ARCH_PPC64
784void VisitPairBinop(InstructionSelector* selector, InstructionCode opcode,
785                    Node* node) {
786  PPCOperandGenerator g(selector);
787
788  // We use UseUniqueRegister here to avoid register sharing with the output
789  // registers.
790  InstructionOperand inputs[] = {
791      g.UseRegister(node->InputAt(0)), g.UseUniqueRegister(node->InputAt(1)),
792      g.UseRegister(node->InputAt(2)), g.UseUniqueRegister(node->InputAt(3))};
793
794  InstructionOperand outputs[] = {
795      g.DefineAsRegister(node),
796      g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};
797
798  selector->Emit(opcode, 2, outputs, 4, inputs);
799}
800
801void InstructionSelector::VisitInt32PairAdd(Node* node) {
802  VisitPairBinop(this, kPPC_AddPair, node);
803}
804
805void InstructionSelector::VisitInt32PairSub(Node* node) {
806  VisitPairBinop(this, kPPC_SubPair, node);
807}
808
809void InstructionSelector::VisitInt32PairMul(Node* node) {
810  PPCOperandGenerator g(this);
811  InstructionOperand inputs[] = {g.UseUniqueRegister(node->InputAt(0)),
812                                 g.UseUniqueRegister(node->InputAt(1)),
813                                 g.UseUniqueRegister(node->InputAt(2)),
814                                 g.UseRegister(node->InputAt(3))};
815
816  InstructionOperand outputs[] = {
817      g.DefineAsRegister(node),
818      g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};
819
820  InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
821
822  Emit(kPPC_MulPair, 2, outputs, 4, inputs, 2, temps);
823}
824
825void VisitPairShift(InstructionSelector* selector, InstructionCode opcode,
826                    Node* node) {
827  PPCOperandGenerator g(selector);
828  Int32Matcher m(node->InputAt(2));
829  InstructionOperand shift_operand;
830  if (m.HasValue()) {
831    shift_operand = g.UseImmediate(m.node());
832  } else {
833    shift_operand = g.UseUniqueRegister(m.node());
834  }
835
836  InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0)),
837                                 g.UseRegister(node->InputAt(1)),
838                                 shift_operand};
839
840  InstructionOperand outputs[] = {
841      g.DefineSameAsFirst(node),
842      g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};
843
844  selector->Emit(opcode, 2, outputs, 3, inputs);
845}
846
847void InstructionSelector::VisitWord32PairShl(Node* node) {
848  VisitPairShift(this, kPPC_ShiftLeftPair, node);
849}
850
851void InstructionSelector::VisitWord32PairShr(Node* node) {
852  VisitPairShift(this, kPPC_ShiftRightPair, node);
853}
854
855void InstructionSelector::VisitWord32PairSar(Node* node) {
856  VisitPairShift(this, kPPC_ShiftRightAlgPair, node);
857}
858#endif
859
860#if V8_TARGET_ARCH_PPC64
861void InstructionSelector::VisitWord64Sar(Node* node) {
862  PPCOperandGenerator g(this);
863  Int64BinopMatcher m(node);
864  if (CanCover(m.node(), m.left().node()) && m.left().IsLoad() &&
865      m.right().Is(32)) {
866    // Just load and sign-extend the interesting 4 bytes instead. This happens,
867    // for example, when we're loading and untagging SMIs.
868    BaseWithIndexAndDisplacement64Matcher mleft(m.left().node(), true);
869    if (mleft.matches() && mleft.index() == nullptr) {
870      int64_t offset = 0;
871      Node* displacement = mleft.displacement();
872      if (displacement != nullptr) {
873        Int64Matcher mdisplacement(displacement);
874        DCHECK(mdisplacement.HasValue());
875        offset = mdisplacement.Value();
876      }
877      offset = SmiWordOffset(offset);
878      if (g.CanBeImmediate(offset, kInt16Imm_4ByteAligned)) {
879        Emit(kPPC_LoadWordS32 | AddressingModeField::encode(kMode_MRI),
880             g.DefineAsRegister(node), g.UseRegister(mleft.base()),
881             g.TempImmediate(offset));
882        return;
883      }
884    }
885  }
886  VisitRRO(this, kPPC_ShiftRightAlg64, node, kShift64Imm);
887}
888#endif
889
890
891// TODO(mbrandy): Absorb logical-and into rlwinm?
892void InstructionSelector::VisitWord32Ror(Node* node) {
893  VisitRRO(this, kPPC_RotRight32, node, kShift32Imm);
894}
895
896
897#if V8_TARGET_ARCH_PPC64
898// TODO(mbrandy): Absorb logical-and into rldic?
899void InstructionSelector::VisitWord64Ror(Node* node) {
900  VisitRRO(this, kPPC_RotRight64, node, kShift64Imm);
901}
902#endif
903
904
905void InstructionSelector::VisitWord32Clz(Node* node) {
906  PPCOperandGenerator g(this);
907  Emit(kPPC_Cntlz32, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
908}
909
910
911#if V8_TARGET_ARCH_PPC64
912void InstructionSelector::VisitWord64Clz(Node* node) {
913  PPCOperandGenerator g(this);
914  Emit(kPPC_Cntlz64, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
915}
916#endif
917
918
919void InstructionSelector::VisitWord32Popcnt(Node* node) {
920  PPCOperandGenerator g(this);
921  Emit(kPPC_Popcnt32, g.DefineAsRegister(node),
922       g.UseRegister(node->InputAt(0)));
923}
924
925
926#if V8_TARGET_ARCH_PPC64
927void InstructionSelector::VisitWord64Popcnt(Node* node) {
928  PPCOperandGenerator g(this);
929  Emit(kPPC_Popcnt64, g.DefineAsRegister(node),
930       g.UseRegister(node->InputAt(0)));
931}
932#endif
933
934
935void InstructionSelector::VisitWord32Ctz(Node* node) { UNREACHABLE(); }
936
937
938#if V8_TARGET_ARCH_PPC64
939void InstructionSelector::VisitWord64Ctz(Node* node) { UNREACHABLE(); }
940#endif
941
942
943void InstructionSelector::VisitWord32ReverseBits(Node* node) { UNREACHABLE(); }
944
945
946#if V8_TARGET_ARCH_PPC64
947void InstructionSelector::VisitWord64ReverseBits(Node* node) { UNREACHABLE(); }
948#endif
949
950
951void InstructionSelector::VisitInt32Add(Node* node) {
952  VisitBinop<Int32BinopMatcher>(this, node, kPPC_Add, kInt16Imm);
953}
954
955
956#if V8_TARGET_ARCH_PPC64
957void InstructionSelector::VisitInt64Add(Node* node) {
958  VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add, kInt16Imm);
959}
960#endif
961
962void InstructionSelector::VisitInt32Sub(Node* node) {
963  PPCOperandGenerator g(this);
964  Int32BinopMatcher m(node);
965  if (m.left().Is(0)) {
966    Emit(kPPC_Neg, g.DefineAsRegister(node), g.UseRegister(m.right().node()));
967  } else {
968    VisitBinop<Int32BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate);
969  }
970}
971
972
973#if V8_TARGET_ARCH_PPC64
974void InstructionSelector::VisitInt64Sub(Node* node) {
975  PPCOperandGenerator g(this);
976  Int64BinopMatcher m(node);
977  if (m.left().Is(0)) {
978    Emit(kPPC_Neg, g.DefineAsRegister(node), g.UseRegister(m.right().node()));
979  } else {
980    VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate);
981  }
982}
983#endif
984
985
986void InstructionSelector::VisitInt32Mul(Node* node) {
987  VisitRRR(this, kPPC_Mul32, node);
988}
989
990
991#if V8_TARGET_ARCH_PPC64
992void InstructionSelector::VisitInt64Mul(Node* node) {
993  VisitRRR(this, kPPC_Mul64, node);
994}
995#endif
996
997
998void InstructionSelector::VisitInt32MulHigh(Node* node) {
999  PPCOperandGenerator g(this);
1000  Emit(kPPC_MulHigh32, g.DefineAsRegister(node),
1001       g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
1002}
1003
1004
1005void InstructionSelector::VisitUint32MulHigh(Node* node) {
1006  PPCOperandGenerator g(this);
1007  Emit(kPPC_MulHighU32, g.DefineAsRegister(node),
1008       g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
1009}
1010
1011
1012void InstructionSelector::VisitInt32Div(Node* node) {
1013  VisitRRR(this, kPPC_Div32, node);
1014}
1015
1016
1017#if V8_TARGET_ARCH_PPC64
1018void InstructionSelector::VisitInt64Div(Node* node) {
1019  VisitRRR(this, kPPC_Div64, node);
1020}
1021#endif
1022
1023
1024void InstructionSelector::VisitUint32Div(Node* node) {
1025  VisitRRR(this, kPPC_DivU32, node);
1026}
1027
1028
1029#if V8_TARGET_ARCH_PPC64
1030void InstructionSelector::VisitUint64Div(Node* node) {
1031  VisitRRR(this, kPPC_DivU64, node);
1032}
1033#endif
1034
1035
1036void InstructionSelector::VisitInt32Mod(Node* node) {
1037  VisitRRR(this, kPPC_Mod32, node);
1038}
1039
1040
1041#if V8_TARGET_ARCH_PPC64
1042void InstructionSelector::VisitInt64Mod(Node* node) {
1043  VisitRRR(this, kPPC_Mod64, node);
1044}
1045#endif
1046
1047
1048void InstructionSelector::VisitUint32Mod(Node* node) {
1049  VisitRRR(this, kPPC_ModU32, node);
1050}
1051
1052
1053#if V8_TARGET_ARCH_PPC64
1054void InstructionSelector::VisitUint64Mod(Node* node) {
1055  VisitRRR(this, kPPC_ModU64, node);
1056}
1057#endif
1058
1059
1060void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
1061  VisitRR(this, kPPC_Float32ToDouble, node);
1062}
1063
1064
1065void InstructionSelector::VisitRoundInt32ToFloat32(Node* node) {
1066  VisitRR(this, kPPC_Int32ToFloat32, node);
1067}
1068
1069
1070void InstructionSelector::VisitRoundUint32ToFloat32(Node* node) {
1071  VisitRR(this, kPPC_Uint32ToFloat32, node);
1072}
1073
1074
1075void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
1076  VisitRR(this, kPPC_Int32ToDouble, node);
1077}
1078
1079
1080void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
1081  VisitRR(this, kPPC_Uint32ToDouble, node);
1082}
1083
1084
1085void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
1086  VisitRR(this, kPPC_DoubleToInt32, node);
1087}
1088
1089
1090void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
1091  VisitRR(this, kPPC_DoubleToUint32, node);
1092}
1093
1094void InstructionSelector::VisitTruncateFloat64ToUint32(Node* node) {
1095  VisitRR(this, kPPC_DoubleToUint32, node);
1096}
1097
1098#if V8_TARGET_ARCH_PPC64
1099void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
1100  VisitTryTruncateDouble(this, kPPC_DoubleToInt64, node);
1101}
1102
1103
1104void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
1105  VisitTryTruncateDouble(this, kPPC_DoubleToInt64, node);
1106}
1107
1108
1109void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
1110  VisitTryTruncateDouble(this, kPPC_DoubleToUint64, node);
1111}
1112
1113
1114void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
1115  VisitTryTruncateDouble(this, kPPC_DoubleToUint64, node);
1116}
1117
1118
1119void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
1120  // TODO(mbrandy): inspect input to see if nop is appropriate.
1121  VisitRR(this, kPPC_ExtendSignWord32, node);
1122}
1123
1124
1125void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
1126  // TODO(mbrandy): inspect input to see if nop is appropriate.
1127  VisitRR(this, kPPC_Uint32ToUint64, node);
1128}
1129#endif
1130
1131
1132void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
1133  VisitRR(this, kPPC_DoubleToFloat32, node);
1134}
1135
1136void InstructionSelector::VisitTruncateFloat64ToWord32(Node* node) {
1137  VisitRR(this, kArchTruncateDoubleToI, node);
1138}
1139
1140void InstructionSelector::VisitRoundFloat64ToInt32(Node* node) {
1141  VisitRR(this, kPPC_DoubleToInt32, node);
1142}
1143
1144
1145void InstructionSelector::VisitTruncateFloat32ToInt32(Node* node) {
1146  VisitRR(this, kPPC_DoubleToInt32, node);
1147}
1148
1149
1150void InstructionSelector::VisitTruncateFloat32ToUint32(Node* node) {
1151  VisitRR(this, kPPC_DoubleToUint32, node);
1152}
1153
1154
1155#if V8_TARGET_ARCH_PPC64
1156void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
1157  // TODO(mbrandy): inspect input to see if nop is appropriate.
1158  VisitRR(this, kPPC_Int64ToInt32, node);
1159}
1160
1161
1162void InstructionSelector::VisitRoundInt64ToFloat32(Node* node) {
1163  VisitRR(this, kPPC_Int64ToFloat32, node);
1164}
1165
1166
1167void InstructionSelector::VisitRoundInt64ToFloat64(Node* node) {
1168  VisitRR(this, kPPC_Int64ToDouble, node);
1169}
1170
1171
1172void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
1173  VisitRR(this, kPPC_Uint64ToFloat32, node);
1174}
1175
1176
1177void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
1178  VisitRR(this, kPPC_Uint64ToDouble, node);
1179}
1180#endif
1181
1182
1183void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
1184  VisitRR(this, kPPC_BitcastFloat32ToInt32, node);
1185}
1186
1187
1188#if V8_TARGET_ARCH_PPC64
1189void InstructionSelector::VisitBitcastFloat64ToInt64(Node* node) {
1190  VisitRR(this, kPPC_BitcastDoubleToInt64, node);
1191}
1192#endif
1193
1194
1195void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
1196  VisitRR(this, kPPC_BitcastInt32ToFloat32, node);
1197}
1198
1199
1200#if V8_TARGET_ARCH_PPC64
1201void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
1202  VisitRR(this, kPPC_BitcastInt64ToDouble, node);
1203}
1204#endif
1205
1206
1207void InstructionSelector::VisitFloat32Add(Node* node) {
1208  VisitRRR(this, kPPC_AddDouble | MiscField::encode(1), node);
1209}
1210
1211
1212void InstructionSelector::VisitFloat64Add(Node* node) {
1213  // TODO(mbrandy): detect multiply-add
1214  VisitRRR(this, kPPC_AddDouble, node);
1215}
1216
1217
1218void InstructionSelector::VisitFloat32Sub(Node* node) {
1219  PPCOperandGenerator g(this);
1220  Float32BinopMatcher m(node);
1221  if (m.left().IsMinusZero()) {
1222    Emit(kPPC_NegDouble | MiscField::encode(1), g.DefineAsRegister(node),
1223         g.UseRegister(m.right().node()));
1224    return;
1225  }
1226  VisitRRR(this, kPPC_SubDouble | MiscField::encode(1), node);
1227}
1228
1229void InstructionSelector::VisitFloat32SubPreserveNan(Node* node) {
1230  PPCOperandGenerator g(this);
1231  VisitRRR(this, kPPC_SubDouble | MiscField::encode(1), node);
1232}
1233
1234void InstructionSelector::VisitFloat64Sub(Node* node) {
1235  // TODO(mbrandy): detect multiply-subtract
1236  PPCOperandGenerator g(this);
1237  Float64BinopMatcher m(node);
1238  if (m.left().IsMinusZero()) {
1239    if (m.right().IsFloat64RoundDown() &&
1240        CanCover(m.node(), m.right().node())) {
1241      if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub &&
1242          CanCover(m.right().node(), m.right().InputAt(0))) {
1243        Float64BinopMatcher mright0(m.right().InputAt(0));
1244        if (mright0.left().IsMinusZero()) {
1245          // -floor(-x) = ceil(x)
1246          Emit(kPPC_CeilDouble, g.DefineAsRegister(node),
1247               g.UseRegister(mright0.right().node()));
1248          return;
1249        }
1250      }
1251    }
1252    Emit(kPPC_NegDouble, g.DefineAsRegister(node),
1253         g.UseRegister(m.right().node()));
1254    return;
1255  }
1256  VisitRRR(this, kPPC_SubDouble, node);
1257}
1258
1259void InstructionSelector::VisitFloat64SubPreserveNan(Node* node) {
1260  VisitRRR(this, kPPC_SubDouble, node);
1261}
1262
1263void InstructionSelector::VisitFloat32Mul(Node* node) {
1264  VisitRRR(this, kPPC_MulDouble | MiscField::encode(1), node);
1265}
1266
1267
1268void InstructionSelector::VisitFloat64Mul(Node* node) {
1269  // TODO(mbrandy): detect negate
1270  VisitRRR(this, kPPC_MulDouble, node);
1271}
1272
1273
1274void InstructionSelector::VisitFloat32Div(Node* node) {
1275  VisitRRR(this, kPPC_DivDouble | MiscField::encode(1), node);
1276}
1277
1278
1279void InstructionSelector::VisitFloat64Div(Node* node) {
1280  VisitRRR(this, kPPC_DivDouble, node);
1281}
1282
1283
1284void InstructionSelector::VisitFloat64Mod(Node* node) {
1285  PPCOperandGenerator g(this);
1286  Emit(kPPC_ModDouble, g.DefineAsFixed(node, d1),
1287       g.UseFixed(node->InputAt(0), d1),
1288       g.UseFixed(node->InputAt(1), d2))->MarkAsCall();
1289}
1290
1291
1292void InstructionSelector::VisitFloat32Max(Node* node) { UNREACHABLE(); }
1293
1294
1295void InstructionSelector::VisitFloat64Max(Node* node) { UNREACHABLE(); }
1296
1297void InstructionSelector::VisitFloat64SilenceNaN(Node* node) {
1298  VisitRR(this, kPPC_Float64SilenceNaN, node);
1299}
1300
1301
1302void InstructionSelector::VisitFloat32Min(Node* node) { UNREACHABLE(); }
1303
1304
1305void InstructionSelector::VisitFloat64Min(Node* node) { UNREACHABLE(); }
1306
1307
1308void InstructionSelector::VisitFloat32Abs(Node* node) {
1309  VisitRR(this, kPPC_AbsDouble | MiscField::encode(1), node);
1310}
1311
1312
1313void InstructionSelector::VisitFloat64Abs(Node* node) {
1314  VisitRR(this, kPPC_AbsDouble, node);
1315}
1316
1317void InstructionSelector::VisitFloat32Sqrt(Node* node) {
1318  VisitRR(this, kPPC_SqrtDouble | MiscField::encode(1), node);
1319}
1320
1321void InstructionSelector::VisitFloat64Ieee754Unop(Node* node,
1322                                                  InstructionCode opcode) {
1323  PPCOperandGenerator g(this);
1324  Emit(opcode, g.DefineAsFixed(node, d1), g.UseFixed(node->InputAt(0), d1))
1325       ->MarkAsCall();
1326}
1327
1328void InstructionSelector::VisitFloat64Ieee754Binop(Node* node,
1329                                                  InstructionCode opcode) {
1330  PPCOperandGenerator g(this);
1331  Emit(opcode, g.DefineAsFixed(node, d1),
1332       g.UseFixed(node->InputAt(0), d1),
1333       g.UseFixed(node->InputAt(1), d2))->MarkAsCall();
1334}
1335
1336void InstructionSelector::VisitFloat64Sqrt(Node* node) {
1337  VisitRR(this, kPPC_SqrtDouble, node);
1338}
1339
1340
1341void InstructionSelector::VisitFloat32RoundDown(Node* node) {
1342  VisitRR(this, kPPC_FloorDouble | MiscField::encode(1), node);
1343}
1344
1345
1346void InstructionSelector::VisitFloat64RoundDown(Node* node) {
1347  VisitRR(this, kPPC_FloorDouble, node);
1348}
1349
1350
1351void InstructionSelector::VisitFloat32RoundUp(Node* node) {
1352  VisitRR(this, kPPC_CeilDouble | MiscField::encode(1), node);
1353}
1354
1355
1356void InstructionSelector::VisitFloat64RoundUp(Node* node) {
1357  VisitRR(this, kPPC_CeilDouble, node);
1358}
1359
1360
1361void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
1362  VisitRR(this, kPPC_TruncateDouble | MiscField::encode(1), node);
1363}
1364
1365
1366void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
1367  VisitRR(this, kPPC_TruncateDouble, node);
1368}
1369
1370
1371void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
1372  VisitRR(this, kPPC_RoundDouble, node);
1373}
1374
1375
1376void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
1377  UNREACHABLE();
1378}
1379
1380
1381void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
1382  UNREACHABLE();
1383}
1384
1385void InstructionSelector::VisitFloat32Neg(Node* node) { UNREACHABLE(); }
1386
1387void InstructionSelector::VisitFloat64Neg(Node* node) { UNREACHABLE(); }
1388
1389void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
1390  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1391    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
1392    return VisitBinop<Int32BinopMatcher>(this, node, kPPC_AddWithOverflow32,
1393                                         kInt16Imm, &cont);
1394  }
1395  FlagsContinuation cont;
1396  VisitBinop<Int32BinopMatcher>(this, node, kPPC_AddWithOverflow32, kInt16Imm,
1397                                &cont);
1398}
1399
1400
1401void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
1402  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1403    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
1404    return VisitBinop<Int32BinopMatcher>(this, node, kPPC_SubWithOverflow32,
1405                                         kInt16Imm_Negate, &cont);
1406  }
1407  FlagsContinuation cont;
1408  VisitBinop<Int32BinopMatcher>(this, node, kPPC_SubWithOverflow32,
1409                                kInt16Imm_Negate, &cont);
1410}
1411
1412
1413#if V8_TARGET_ARCH_PPC64
1414void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
1415  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1416    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
1417    return VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add, kInt16Imm,
1418                                         &cont);
1419  }
1420  FlagsContinuation cont;
1421  VisitBinop<Int64BinopMatcher>(this, node, kPPC_Add, kInt16Imm, &cont);
1422}
1423
1424
1425void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
1426  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1427    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
1428    return VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate,
1429                                         &cont);
1430  }
1431  FlagsContinuation cont;
1432  VisitBinop<Int64BinopMatcher>(this, node, kPPC_Sub, kInt16Imm_Negate, &cont);
1433}
1434#endif
1435
1436
1437static bool CompareLogical(FlagsContinuation* cont) {
1438  switch (cont->condition()) {
1439    case kUnsignedLessThan:
1440    case kUnsignedGreaterThanOrEqual:
1441    case kUnsignedLessThanOrEqual:
1442    case kUnsignedGreaterThan:
1443      return true;
1444    default:
1445      return false;
1446  }
1447  UNREACHABLE();
1448  return false;
1449}
1450
1451
1452namespace {
1453
1454// Shared routine for multiple compare operations.
1455void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
1456                  InstructionOperand left, InstructionOperand right,
1457                  FlagsContinuation* cont) {
1458  PPCOperandGenerator g(selector);
1459  opcode = cont->Encode(opcode);
1460  if (cont->IsBranch()) {
1461    selector->Emit(opcode, g.NoOutput(), left, right,
1462                   g.Label(cont->true_block()), g.Label(cont->false_block()));
1463  } else if (cont->IsDeoptimize()) {
1464    selector->EmitDeoptimize(opcode, g.NoOutput(), left, right,
1465                             cont->frame_state());
1466  } else {
1467    DCHECK(cont->IsSet());
1468    selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
1469  }
1470}
1471
1472
1473// Shared routine for multiple word compare operations.
1474void VisitWordCompare(InstructionSelector* selector, Node* node,
1475                      InstructionCode opcode, FlagsContinuation* cont,
1476                      bool commutative, ImmediateMode immediate_mode) {
1477  PPCOperandGenerator g(selector);
1478  Node* left = node->InputAt(0);
1479  Node* right = node->InputAt(1);
1480
1481  // Match immediates on left or right side of comparison.
1482  if (g.CanBeImmediate(right, immediate_mode)) {
1483    VisitCompare(selector, opcode, g.UseRegister(left), g.UseImmediate(right),
1484                 cont);
1485  } else if (g.CanBeImmediate(left, immediate_mode)) {
1486    if (!commutative) cont->Commute();
1487    VisitCompare(selector, opcode, g.UseRegister(right), g.UseImmediate(left),
1488                 cont);
1489  } else {
1490    VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right),
1491                 cont);
1492  }
1493}
1494
1495
1496void VisitWord32Compare(InstructionSelector* selector, Node* node,
1497                        FlagsContinuation* cont) {
1498  ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm);
1499  VisitWordCompare(selector, node, kPPC_Cmp32, cont, false, mode);
1500}
1501
1502
1503#if V8_TARGET_ARCH_PPC64
1504void VisitWord64Compare(InstructionSelector* selector, Node* node,
1505                        FlagsContinuation* cont) {
1506  ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm);
1507  VisitWordCompare(selector, node, kPPC_Cmp64, cont, false, mode);
1508}
1509#endif
1510
1511
1512// Shared routine for multiple float32 compare operations.
1513void VisitFloat32Compare(InstructionSelector* selector, Node* node,
1514                         FlagsContinuation* cont) {
1515  PPCOperandGenerator g(selector);
1516  Node* left = node->InputAt(0);
1517  Node* right = node->InputAt(1);
1518  VisitCompare(selector, kPPC_CmpDouble, g.UseRegister(left),
1519               g.UseRegister(right), cont);
1520}
1521
1522
1523// Shared routine for multiple float64 compare operations.
1524void VisitFloat64Compare(InstructionSelector* selector, Node* node,
1525                         FlagsContinuation* cont) {
1526  PPCOperandGenerator g(selector);
1527  Node* left = node->InputAt(0);
1528  Node* right = node->InputAt(1);
1529  VisitCompare(selector, kPPC_CmpDouble, g.UseRegister(left),
1530               g.UseRegister(right), cont);
1531}
1532
1533
1534// Shared routine for word comparisons against zero.
1535void VisitWordCompareZero(InstructionSelector* selector, Node* user,
1536                          Node* value, InstructionCode opcode,
1537                          FlagsContinuation* cont) {
1538  while (selector->CanCover(user, value)) {
1539    switch (value->opcode()) {
1540      case IrOpcode::kWord32Equal: {
1541        // Combine with comparisons against 0 by simply inverting the
1542        // continuation.
1543        Int32BinopMatcher m(value);
1544        if (m.right().Is(0)) {
1545          user = value;
1546          value = m.left().node();
1547          cont->Negate();
1548          continue;
1549        }
1550        cont->OverwriteAndNegateIfEqual(kEqual);
1551        return VisitWord32Compare(selector, value, cont);
1552      }
1553      case IrOpcode::kInt32LessThan:
1554        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
1555        return VisitWord32Compare(selector, value, cont);
1556      case IrOpcode::kInt32LessThanOrEqual:
1557        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
1558        return VisitWord32Compare(selector, value, cont);
1559      case IrOpcode::kUint32LessThan:
1560        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1561        return VisitWord32Compare(selector, value, cont);
1562      case IrOpcode::kUint32LessThanOrEqual:
1563        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1564        return VisitWord32Compare(selector, value, cont);
1565#if V8_TARGET_ARCH_PPC64
1566      case IrOpcode::kWord64Equal:
1567        cont->OverwriteAndNegateIfEqual(kEqual);
1568        return VisitWord64Compare(selector, value, cont);
1569      case IrOpcode::kInt64LessThan:
1570        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
1571        return VisitWord64Compare(selector, value, cont);
1572      case IrOpcode::kInt64LessThanOrEqual:
1573        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
1574        return VisitWord64Compare(selector, value, cont);
1575      case IrOpcode::kUint64LessThan:
1576        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1577        return VisitWord64Compare(selector, value, cont);
1578      case IrOpcode::kUint64LessThanOrEqual:
1579        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1580        return VisitWord64Compare(selector, value, cont);
1581#endif
1582      case IrOpcode::kFloat32Equal:
1583        cont->OverwriteAndNegateIfEqual(kEqual);
1584        return VisitFloat32Compare(selector, value, cont);
1585      case IrOpcode::kFloat32LessThan:
1586        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1587        return VisitFloat32Compare(selector, value, cont);
1588      case IrOpcode::kFloat32LessThanOrEqual:
1589        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1590        return VisitFloat32Compare(selector, value, cont);
1591      case IrOpcode::kFloat64Equal:
1592        cont->OverwriteAndNegateIfEqual(kEqual);
1593        return VisitFloat64Compare(selector, value, cont);
1594      case IrOpcode::kFloat64LessThan:
1595        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1596        return VisitFloat64Compare(selector, value, cont);
1597      case IrOpcode::kFloat64LessThanOrEqual:
1598        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1599        return VisitFloat64Compare(selector, value, cont);
1600      case IrOpcode::kProjection:
1601        // Check if this is the overflow output projection of an
1602        // <Operation>WithOverflow node.
1603        if (ProjectionIndexOf(value->op()) == 1u) {
1604          // We cannot combine the <Operation>WithOverflow with this branch
1605          // unless the 0th projection (the use of the actual value of the
1606          // <Operation> is either nullptr, which means there's no use of the
1607          // actual value, or was already defined, which means it is scheduled
1608          // *AFTER* this branch).
          Node* const node = value->InputAt(0);
          Node* const result = NodeProperties::FindProjection(node, 0);
          if (result == nullptr || selector->IsDefined(result)) {
            switch (node->opcode()) {
              case IrOpcode::kInt32AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int32BinopMatcher>(
                    selector, node, kPPC_AddWithOverflow32, kInt16Imm, cont);
              case IrOpcode::kInt32SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int32BinopMatcher>(selector, node,
                                                     kPPC_SubWithOverflow32,
                                                     kInt16Imm_Negate, cont);
#if V8_TARGET_ARCH_PPC64
              case IrOpcode::kInt64AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int64BinopMatcher>(selector, node, kPPC_Add,
                                                     kInt16Imm, cont);
              case IrOpcode::kInt64SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int64BinopMatcher>(selector, node, kPPC_Sub,
                                                     kInt16Imm_Negate, cont);
#endif
              default:
                break;
            }
          }
        }
        break;
      case IrOpcode::kInt32Sub:
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kWord32And:
        // TODO(mbrandy): opportunity for rlwinm?
        return VisitWordCompare(selector, value, kPPC_Tst32, cont, true,
                                kInt16Imm_Unsigned);
// TODO(mbrandy): Handle?
// case IrOpcode::kInt32Add:
// case IrOpcode::kWord32Or:
// case IrOpcode::kWord32Xor:
// case IrOpcode::kWord32Sar:
// case IrOpcode::kWord32Shl:
// case IrOpcode::kWord32Shr:
// case IrOpcode::kWord32Ror:
#if V8_TARGET_ARCH_PPC64
      case IrOpcode::kInt64Sub:
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kWord64And:
        // TODO(mbrandy): opportunity for rldic?
        return VisitWordCompare(selector, value, kPPC_Tst64, cont, true,
                                kInt16Imm_Unsigned);
// TODO(mbrandy): Handle?
// case IrOpcode::kInt64Add:
// case IrOpcode::kWord64Or:
// case IrOpcode::kWord64Xor:
// case IrOpcode::kWord64Sar:
// case IrOpcode::kWord64Shl:
// case IrOpcode::kWord64Shr:
// case IrOpcode::kWord64Ror:
#endif
      default:
        break;
    }
    break;
  }

  // Branch could not be combined with a compare, emit compare against 0.
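  // That is, emit an explicit compare of |value| against zero using the
  // compare opcode selected by the caller (kPPC_Cmp32 or kPPC_Cmp64) and
  // let the continuation decide how the flags are consumed.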
  PPCOperandGenerator g(selector);
  VisitCompare(selector, opcode, g.UseRegister(value), g.TempImmediate(0),
               cont);
}


void VisitWord32CompareZero(InstructionSelector* selector, Node* user,
                            Node* value, FlagsContinuation* cont) {
  VisitWordCompareZero(selector, user, value, kPPC_Cmp32, cont);
}


#if V8_TARGET_ARCH_PPC64
void VisitWord64CompareZero(InstructionSelector* selector, Node* user,
                            Node* value, FlagsContinuation* cont) {
  VisitWordCompareZero(selector, user, value, kPPC_Cmp64, cont);
}
#endif

}  // namespace


void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
                                      BasicBlock* fbranch) {
  FlagsContinuation cont(kNotEqual, tbranch, fbranch);
  VisitWord32CompareZero(this, branch, branch->InputAt(0), &cont);
}

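// DeoptimizeIf deoptimizes when the condition value is non-zero (kNotEqual
// against zero); DeoptimizeUnless deoptimizes when it is zero (kEqual).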
void InstructionSelector::VisitDeoptimizeIf(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForDeoptimize(kNotEqual, node->InputAt(1));
  VisitWord32CompareZero(this, node, node->InputAt(0), &cont);
}

void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForDeoptimize(kEqual, node->InputAt(1));
  VisitWord32CompareZero(this, node, node->InputAt(0), &cont);
}

void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
  PPCOperandGenerator g(this);
  InstructionOperand value_operand = g.UseRegister(node->InputAt(0));

  // Emit either ArchTableSwitch or ArchLookupSwitch.
  size_t table_space_cost = 4 + sw.value_range;
  size_t table_time_cost = 3;
  size_t lookup_space_cost = 3 + 2 * sw.case_count;
  size_t lookup_time_cost = sw.case_count;
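  // Worked example (illustrative): for case_count == 4 and value_range == 10,
  // table cost = (4 + 10) + 3 * 3 == 23 and lookup cost = (3 + 2 * 4) + 3 * 4
  // == 23, so the table switch is (just) preferred. The min_value check
  // below conservatively excludes INT32_MIN as a table bias.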
  if (sw.case_count > 0 &&
      table_space_cost + 3 * table_time_cost <=
          lookup_space_cost + 3 * lookup_time_cost &&
      sw.min_value > std::numeric_limits<int32_t>::min()) {
    InstructionOperand index_operand = value_operand;
    if (sw.min_value) {
      index_operand = g.TempRegister();
      Emit(kPPC_Sub, index_operand, value_operand,
           g.TempImmediate(sw.min_value));
    }
    // Generate a table lookup.
    return EmitTableSwitch(sw, index_operand);
  }

  // Generate a sequence of conditional jumps.
  return EmitLookupSwitch(sw, value_operand);
}


void InstructionSelector::VisitWord32Equal(Node* const node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int32BinopMatcher m(node);
  if (m.right().Is(0)) {
    return VisitWord32CompareZero(this, m.node(), m.left().node(), &cont);
  }
  VisitWord32Compare(this, node, &cont);
}


void InstructionSelector::VisitInt32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}


void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}


void InstructionSelector::VisitUint32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}


void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}


#if V8_TARGET_ARCH_PPC64
void InstructionSelector::VisitWord64Equal(Node* const node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int64BinopMatcher m(node);
  if (m.right().Is(0)) {
    return VisitWord64CompareZero(this, m.node(), m.left().node(), &cont);
  }
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitInt64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitUint64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWord64Compare(this, node, &cont);
}
#endif


void InstructionSelector::VisitFloat32Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::EmitPrepareArguments(
    ZoneVector<PushParameter>* arguments, const CallDescriptor* descriptor,
    Node* node) {
  PPCOperandGenerator g(this);

  // Prepare for C function call.
  if (descriptor->IsCFunctionCall()) {
    Emit(kArchPrepareCallCFunction |
             MiscField::encode(static_cast<int>(descriptor->CParameterCount())),
         0, nullptr, 0, nullptr);

    // Poke any stack arguments.
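    // Stack-passed C arguments start at kStackFrameExtraParamSlot (see
    // frames-ppc.h); the slots below it are reserved by the C ABI frame
    // layout.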
    int slot = kStackFrameExtraParamSlot;
    for (PushParameter input : (*arguments)) {
      Emit(kPPC_StoreToStackSlot, g.NoOutput(), g.UseRegister(input.node()),
           g.TempImmediate(slot));
      ++slot;
    }
  } else {
    // Push any stack arguments.
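    // The first parameter is pushed with kPPC_PushFrame, which also reserves
    // all num_slots slots at once; the remaining parameters are then stored
    // into the already-reserved slots.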
    int num_slots = static_cast<int>(descriptor->StackParameterCount());
    int slot = 0;
    for (PushParameter input : (*arguments)) {
      if (slot == 0) {
        DCHECK(input.node());
        Emit(kPPC_PushFrame, g.NoOutput(), g.UseRegister(input.node()),
             g.TempImmediate(num_slots));
      } else {
        // Skip any alignment holes in pushed nodes.
        if (input.node()) {
          Emit(kPPC_StoreToStackSlot, g.NoOutput(), g.UseRegister(input.node()),
               g.TempImmediate(slot));
        }
      }
      ++slot;
    }
  }
}


bool InstructionSelector::IsTailCallAddressImmediate() { return false; }

int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 3; }

void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_DoubleExtractLowWord32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
  PPCOperandGenerator g(this);
  Emit(kPPC_DoubleExtractHighWord32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}

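// When a Float64InsertLowWord32 and a Float64InsertHighWord32 feed each other
// and the inner node can be covered, the pair collapses into a single
// kPPC_DoubleConstruct that assembles the double from its two 32-bit halves
// (high word operand first, then low word).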
void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
  PPCOperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (left->opcode() == IrOpcode::kFloat64InsertHighWord32 &&
      CanCover(node, left)) {
    left = left->InputAt(1);
    Emit(kPPC_DoubleConstruct, g.DefineAsRegister(node), g.UseRegister(left),
         g.UseRegister(right));
    return;
  }
  Emit(kPPC_DoubleInsertLowWord32, g.DefineSameAsFirst(node),
       g.UseRegister(left), g.UseRegister(right));
}


void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
  PPCOperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (left->opcode() == IrOpcode::kFloat64InsertLowWord32 &&
      CanCover(node, left)) {
    left = left->InputAt(1);
    Emit(kPPC_DoubleConstruct, g.DefineAsRegister(node), g.UseRegister(right),
         g.UseRegister(left));
    return;
  }
  Emit(kPPC_DoubleInsertHighWord32, g.DefineSameAsFirst(node),
       g.UseRegister(left), g.UseRegister(right));
}

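// Atomic loads reuse the generic kAtomicLoad* opcodes; only integer
// representations up to 32 bits are handled here, and the address is formed
// with register + register (kMode_MRR) addressing.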
void InstructionSelector::VisitAtomicLoad(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
  PPCOperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kAtomicLoadInt8 : kAtomicLoadUint8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kAtomicLoadInt16 : kAtomicLoadUint16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kAtomicLoadWord32;
      break;
    default:
      UNREACHABLE();
      return;
  }
  Emit(opcode | AddressingModeField::encode(kMode_MRR),
       g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(index));
}

void InstructionSelector::VisitAtomicStore(Node* node) {
  MachineRepresentation rep = AtomicStoreRepresentationOf(node->op());
  PPCOperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);
  ArchOpcode opcode = kArchNop;
  switch (rep) {
    case MachineRepresentation::kWord8:
      opcode = kAtomicStoreWord8;
      break;
    case MachineRepresentation::kWord16:
      opcode = kAtomicStoreWord16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kAtomicStoreWord32;
      break;
    default:
      UNREACHABLE();
      return;
  }

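  // Base, index and value are placed in unique registers so that none of
  // them is shared with another operand of this instruction.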
  InstructionOperand inputs[4];
  size_t input_count = 0;
  inputs[input_count++] = g.UseUniqueRegister(base);
  inputs[input_count++] = g.UseUniqueRegister(index);
  inputs[input_count++] = g.UseUniqueRegister(value);
  Emit(opcode | AddressingModeField::encode(kMode_MRR),
       0, nullptr, input_count, inputs);
}

// static
MachineOperatorBuilder::Flags
InstructionSelector::SupportedMachineOperatorFlags() {
  return MachineOperatorBuilder::kFloat32RoundDown |
         MachineOperatorBuilder::kFloat64RoundDown |
         MachineOperatorBuilder::kFloat32RoundUp |
         MachineOperatorBuilder::kFloat64RoundUp |
         MachineOperatorBuilder::kFloat32RoundTruncate |
         MachineOperatorBuilder::kFloat64RoundTruncate |
         MachineOperatorBuilder::kFloat64RoundTiesAway |
         MachineOperatorBuilder::kWord32Popcnt |
         MachineOperatorBuilder::kWord64Popcnt;
  // We omit kWord32ShiftIsSafe as s[rl]w use 0x3f as a mask rather than 0x1f.
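  // (With a 6-bit mask, shift amounts in the range 32..63 produce 0 instead
  // of wrapping modulo 32, which is what kWord32ShiftIsSafe would require.)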
}

// static
MachineOperatorBuilder::AlignmentRequirements
InstructionSelector::AlignmentRequirements() {
  return MachineOperatorBuilder::AlignmentRequirements::
      FullUnalignedAccessSupport();
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8
