// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_REGISTER_ALLOCATOR_H_
#define V8_REGISTER_ALLOCATOR_H_

#include "src/compiler/instruction.h"
#include "src/ostreams.h"
#include "src/register-configuration.h"
#include "src/zone-containers.h"

namespace v8 {
namespace internal {
namespace compiler {

enum RegisterKind {
  GENERAL_REGISTERS,
  DOUBLE_REGISTERS
};


// This class represents a single point of an InstructionOperand's lifetime.
// For each instruction there are four lifetime positions:
//
//   [[START, END], [START, END]]
//
// where the first half position corresponds to
//
//  [GapPosition::START, GapPosition::END]
//
// and the second half position corresponds to
//
//  [Lifetime::USED_AT_START, Lifetime::USED_AT_END]
//
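// A worked example of the encoding (illustrative only, derived from the
// kStep/kHalfStep constants defined below): with kHalfStep == 2 and
// kStep == 4, instruction index 3 maps to
//
//   GapFromInstructionIndex(3).value()                == 12  // gap START
//   GapFromInstructionIndex(3).End().value()          == 13  // gap END
//   InstructionFromInstructionIndex(3).value()        == 14  // instruction START
//   InstructionFromInstructionIndex(3).End().value()  == 15  // instruction END
//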
class LifetimePosition final {
 public:
  // Return the lifetime position that corresponds to the beginning of
  // the gap with the given index.
  static LifetimePosition GapFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep);
  }
  // Return the lifetime position that corresponds to the beginning of
  // the instruction with the given index.
  static LifetimePosition InstructionFromInstructionIndex(int index) {
    return LifetimePosition(index * kStep + kHalfStep);
  }

  // Returns a numeric representation of this lifetime position.
  int value() const { return value_; }

  // Returns the index of the instruction to which this lifetime position
  // corresponds.
  int ToInstructionIndex() const {
    DCHECK(IsValid());
    return value_ / kStep;
  }

  // Returns true if this lifetime position corresponds to a START value.
  bool IsStart() const { return (value_ & (kHalfStep - 1)) == 0; }
  // Returns true if this lifetime position corresponds to an END value.
  bool IsEnd() const { return (value_ & (kHalfStep - 1)) == 1; }
  // Returns true if this lifetime position corresponds to a gap START value.
  bool IsFullStart() const { return (value_ & (kStep - 1)) == 0; }

  bool IsGapPosition() const { return (value_ & 0x2) == 0; }
  bool IsInstructionPosition() const { return !IsGapPosition(); }

  // Returns the lifetime position for the current START.
  LifetimePosition Start() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kHalfStep - 1));
  }

  // Returns the lifetime position for the current gap START.
  LifetimePosition FullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(value_ & ~(kStep - 1));
  }

  // Returns the lifetime position for the current END.
  LifetimePosition End() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep / 2);
  }

  // Returns the lifetime position for the beginning of the next START.
  LifetimePosition NextStart() const {
    DCHECK(IsValid());
    return LifetimePosition(Start().value_ + kHalfStep);
  }

  // Returns the lifetime position for the beginning of the next gap START.
  LifetimePosition NextFullStart() const {
    DCHECK(IsValid());
    return LifetimePosition(FullStart().value_ + kStep);
  }

  // Returns the lifetime position for the beginning of the previous START.
  LifetimePosition PrevStart() const {
    DCHECK(IsValid());
    DCHECK(value_ >= kHalfStep);
    return LifetimePosition(Start().value_ - kHalfStep);
  }
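
  // Navigation sketch (illustrative, using the constants defined below): for
  // p = InstructionFromInstructionIndex(3), i.e. p.value() == 14:
  //
  //   p.Start().value()         == 14  // already a START
  //   p.End().value()           == 15
  //   p.FullStart().value()     == 12  // gap START of instruction 3
  //   p.NextStart().value()     == 16
  //   p.NextFullStart().value() == 16  // gap START of instruction 4
  //   p.PrevStart().value()     == 12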

  // Constructs the lifetime position which does not correspond to any
  // instruction.
  LifetimePosition() : value_(-1) {}

  // Returns true if this lifetime position corresponds to some
  // instruction.
  bool IsValid() const { return value_ != -1; }

  bool operator<(const LifetimePosition& that) const {
    return this->value_ < that.value_;
  }

  bool operator<=(const LifetimePosition& that) const {
    return this->value_ <= that.value_;
  }

  bool operator==(const LifetimePosition& that) const {
    return this->value_ == that.value_;
  }

  bool operator!=(const LifetimePosition& that) const {
    return this->value_ != that.value_;
  }

  bool operator>(const LifetimePosition& that) const {
    return this->value_ > that.value_;
  }

  bool operator>=(const LifetimePosition& that) const {
    return this->value_ >= that.value_;
  }

  void Print() const;

  static inline LifetimePosition Invalid() { return LifetimePosition(); }

  static inline LifetimePosition MaxPosition() {
    // We have to use this kind of getter instead of a static member due to
    // a crash bug in GDB.
    return LifetimePosition(kMaxInt);
  }

  static inline LifetimePosition FromInt(int value) {
    return LifetimePosition(value);
  }

 private:
  static const int kHalfStep = 2;
  static const int kStep = 2 * kHalfStep;

  // Code relies on kStep and kHalfStep being a power of two.
  STATIC_ASSERT(IS_POWER_OF_TWO(kHalfStep));

  explicit LifetimePosition(int value) : value_(value) {}

  int value_;
};


std::ostream& operator<<(std::ostream& os, const LifetimePosition pos);


// Representation of the non-empty, half-open interval [start, end).
class UseInterval final : public ZoneObject {
 public:
  UseInterval(LifetimePosition start, LifetimePosition end)
      : start_(start), end_(end), next_(nullptr) {
    DCHECK(start < end);
  }

  LifetimePosition start() const { return start_; }
  void set_start(LifetimePosition start) { start_ = start; }
  LifetimePosition end() const { return end_; }
  void set_end(LifetimePosition end) { end_ = end; }
  UseInterval* next() const { return next_; }
  void set_next(UseInterval* next) { next_ = next; }

  // Split this interval at the given position without affecting the
  // live range that owns it. The interval must contain the position.
  UseInterval* SplitAt(LifetimePosition pos, Zone* zone);

  // If this interval intersects with other, returns the smallest position
  // that belongs to both of them.
  LifetimePosition Intersect(const UseInterval* other) const {
    if (other->start() < start_) return other->Intersect(this);
    if (other->start() < end_) return other->start();
    return LifetimePosition::Invalid();
  }

  bool Contains(LifetimePosition point) const {
    return start_ <= point && point < end_;
  }

  // Returns the index of the first gap covered by this interval.
  int FirstGapIndex() const {
    int ret = start_.ToInstructionIndex();
    if (start_.IsInstructionPosition()) {
      ++ret;
    }
    return ret;
  }

  // Returns the index of the last gap covered by this interval.
  int LastGapIndex() const {
    int ret = end_.ToInstructionIndex();
    if (end_.IsGapPosition() && end_.IsStart()) {
      --ret;
    }
    return ret;
  }

 private:
  LifetimePosition start_;
  LifetimePosition end_;
  UseInterval* next_;

  DISALLOW_COPY_AND_ASSIGN(UseInterval);
};
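

// Illustrative sketch of UseInterval queries (hypothetical positions built
// with LifetimePosition::FromInt, not actual allocator output):
//
//   UseInterval a(LifetimePosition::FromInt(4), LifetimePosition::FromInt(12));
//   UseInterval b(LifetimePosition::FromInt(10), LifetimePosition::FromInt(20));
//   a.Contains(LifetimePosition::FromInt(11));  // true: 4 <= 11 < 12
//   a.Contains(LifetimePosition::FromInt(12));  // false: the end is exclusive
//   a.Intersect(&b);                            // position 10, the smallest
//                                               // position common to both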


enum class UsePositionType : uint8_t { kAny, kRequiresRegister, kRequiresSlot };


enum class UsePositionHintType : uint8_t {
  kNone,
  kOperand,
  kUsePos,
  kPhi,
  kUnresolved
};


static const int32_t kUnassignedRegister =
    RegisterConfiguration::kMaxGeneralRegisters;


static_assert(kUnassignedRegister <= RegisterConfiguration::kMaxDoubleRegisters,
              "kUnassignedRegister too small");


// Representation of a use position.
class UsePosition final : public ZoneObject {
 public:
  UsePosition(LifetimePosition pos, InstructionOperand* operand, void* hint,
              UsePositionHintType hint_type);

  InstructionOperand* operand() const { return operand_; }
  bool HasOperand() const { return operand_ != nullptr; }

  bool RegisterIsBeneficial() const {
    return RegisterBeneficialField::decode(flags_);
  }
  UsePositionType type() const { return TypeField::decode(flags_); }
  void set_type(UsePositionType type, bool register_beneficial);

  LifetimePosition pos() const { return pos_; }

  UsePosition* next() const { return next_; }
  void set_next(UsePosition* next) { next_ = next; }

  // For hinting only.
  void set_assigned_register(int register_code) {
    flags_ = AssignedRegisterField::update(flags_, register_code);
  }

  UsePositionHintType hint_type() const {
    return HintTypeField::decode(flags_);
  }
  bool HasHint() const;
  bool HintRegister(int* register_code) const;
  void ResolveHint(UsePosition* use_pos);
  bool IsResolved() const {
    return hint_type() != UsePositionHintType::kUnresolved;
  }
  static UsePositionHintType HintTypeForOperand(const InstructionOperand& op);

 private:
  typedef BitField<UsePositionType, 0, 2> TypeField;
  typedef BitField<UsePositionHintType, 2, 3> HintTypeField;
  typedef BitField<bool, 5, 1> RegisterBeneficialField;
  typedef BitField<int32_t, 6, 6> AssignedRegisterField;
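
  // Layout of flags_, as implied by the BitField typedefs above:
  //
  //   bits 0..1   UsePositionType
  //   bits 2..4   UsePositionHintType
  //   bit  5      register-is-beneficial flag
  //   bits 6..11  assigned register code (used for hinting)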

  InstructionOperand* const operand_;
  void* hint_;
  UsePosition* next_;
  LifetimePosition const pos_;
  uint32_t flags_;

  DISALLOW_COPY_AND_ASSIGN(UsePosition);
};


class SpillRange;
class RegisterAllocationData;
class TopLevelLiveRange;
class LiveRangeGroup;

// Representation of SSA values' live ranges as a collection of (continuous)
// intervals over the instruction ordering.
class LiveRange : public ZoneObject {
 public:
  UseInterval* first_interval() const { return first_interval_; }
  UsePosition* first_pos() const { return first_pos_; }
  TopLevelLiveRange* TopLevel() { return top_level_; }
  const TopLevelLiveRange* TopLevel() const { return top_level_; }

  bool IsTopLevel() const;

  LiveRange* next() const { return next_; }

  int relative_id() const { return relative_id_; }

  bool IsEmpty() const { return first_interval() == nullptr; }

  InstructionOperand GetAssignedOperand() const;

  MachineRepresentation representation() const {
    return RepresentationField::decode(bits_);
  }

  int assigned_register() const { return AssignedRegisterField::decode(bits_); }
  bool HasRegisterAssigned() const {
    return assigned_register() != kUnassignedRegister;
  }
  void set_assigned_register(int reg);
  void UnsetAssignedRegister();

  bool spilled() const { return SpilledField::decode(bits_); }
  void Spill();

  RegisterKind kind() const;

  // Returns the use position in this live range that follows both start
  // and the last processed use position.
  UsePosition* NextUsePosition(LifetimePosition start) const;

  // Returns the use position for which a register is required in this live
  // range and which follows both start and the last processed use position.
  UsePosition* NextRegisterPosition(LifetimePosition start) const;

  // Returns the first use position requiring a stack slot, or nullptr.
  UsePosition* NextSlotPosition(LifetimePosition start) const;

  // Returns the use position for which a register is beneficial in this live
  // range and which follows both start and the last processed use position.
  UsePosition* NextUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Returns the use position for which a register is beneficial in this live
  // range and which precedes start.
  UsePosition* PreviousUsePositionRegisterIsBeneficial(
      LifetimePosition start) const;

  // Returns true if this live range can be spilled at the given position.
  bool CanBeSpilled(LifetimePosition pos) const;

  // Splitting primitive used by both splitting and splintering members.
  // Performs the split, but does not link the resulting ranges.
  // The given position must follow the start of the range.
  // All uses following the given position will be moved from this
  // live range to the result live range.
  // The current range will terminate at position, while result will start from
  // position.
  UsePosition* DetachAt(LifetimePosition position, LiveRange* result,
                        Zone* zone);

  // Detaches at position, and then links the resulting ranges. Returns the
  // child, which starts at position.
  LiveRange* SplitAt(LifetimePosition position, Zone* zone);

  // Returns nullptr when no register is hinted, otherwise sets register_index.
  UsePosition* FirstHintPosition(int* register_index) const;
  UsePosition* FirstHintPosition() const {
    int register_index;
    return FirstHintPosition(&register_index);
  }

  UsePosition* current_hint_position() const {
    DCHECK(current_hint_position_ == FirstHintPosition());
    return current_hint_position_;
  }

  LifetimePosition Start() const {
    DCHECK(!IsEmpty());
    return first_interval()->start();
  }

  LifetimePosition End() const {
    DCHECK(!IsEmpty());
    return last_interval_->end();
  }

  bool ShouldBeAllocatedBefore(const LiveRange* other) const;
  bool CanCover(LifetimePosition position) const;
  bool Covers(LifetimePosition position) const;
  LifetimePosition FirstIntersection(LiveRange* other) const;

  void VerifyChildStructure() const {
    VerifyIntervals();
    VerifyPositions();
  }

  void ConvertUsesToOperand(const InstructionOperand& op,
                            const InstructionOperand& spill_op);
  void SetUseHints(int register_index);
  void UnsetUseHints() { SetUseHints(kUnassignedRegister); }

  // Used solely by the Greedy Allocator:
  unsigned GetSize();
  float weight() const { return weight_; }
  void set_weight(float weight) { weight_ = weight; }
  LiveRangeGroup* group() const { return group_; }
  void set_group(LiveRangeGroup* group) { group_ = group; }
  void Print(const RegisterConfiguration* config, bool with_children) const;
  void Print(bool with_children) const;

  static const int kInvalidSize = -1;
  static const float kInvalidWeight;
  static const float kMaxWeight;

 private:
  friend class TopLevelLiveRange;
  explicit LiveRange(int relative_id, MachineRepresentation rep,
                     TopLevelLiveRange* top_level);

  void UpdateParentForAllChildren(TopLevelLiveRange* new_top_level);

  void set_spilled(bool value) { bits_ = SpilledField::update(bits_, value); }

  UseInterval* FirstSearchIntervalForPosition(LifetimePosition position) const;
  void AdvanceLastProcessedMarker(UseInterval* to_start_of,
                                  LifetimePosition but_not_past) const;

  void VerifyPositions() const;
  void VerifyIntervals() const;

  typedef BitField<bool, 0, 1> SpilledField;
  typedef BitField<int32_t, 6, 6> AssignedRegisterField;
  typedef BitField<MachineRepresentation, 12, 8> RepresentationField;

  // Unique among children and splinters of the same virtual register.
  int relative_id_;
  uint32_t bits_;
  UseInterval* last_interval_;
  UseInterval* first_interval_;
  UsePosition* first_pos_;
  TopLevelLiveRange* top_level_;
  LiveRange* next_;
  // This is used as a cache; it doesn't affect correctness.
  mutable UseInterval* current_interval_;
  // This is used as a cache; it doesn't affect correctness.
  mutable UsePosition* last_processed_use_;
  // This is used as a cache; it's invalid outside of BuildLiveRanges.
  mutable UsePosition* current_hint_position_;
  // Cache the last position splintering stopped at.
  mutable UsePosition* splitting_pointer_;
  // greedy: the number of LifetimePositions covered by this range. Used to
  // prioritize selecting live ranges for register assignment, as well as
  // in weight calculations.
  int size_;

  // greedy: a metric for resolving conflicts between ranges with an assigned
  // register and ranges that intersect them and need a register.
  float weight_;

  // greedy: grouping
  LiveRangeGroup* group_;

  DISALLOW_COPY_AND_ASSIGN(LiveRange);
};
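

// Splitting sketch (hypothetical positions; SplitAt is the linked variant of
// DetachAt, as documented above):
//
//   // Suppose a range is live over [4, 40) with a use at 22. Splitting at 20
//   // truncates this range to [4, 20) and yields a child covering [20, 40)
//   // that owns the use at 22:
//   LiveRange* child = range->SplitAt(LifetimePosition::FromInt(20), zone);
//   // range->End() == 20, child->Start() == 20, range->next() == child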


class LiveRangeGroup final : public ZoneObject {
 public:
  explicit LiveRangeGroup(Zone* zone) : ranges_(zone) {}
  ZoneVector<LiveRange*>& ranges() { return ranges_; }
  const ZoneVector<LiveRange*>& ranges() const { return ranges_; }

  // TODO(mtrofin): populate assigned register and use in weight calculation.
  int assigned_register() const { return assigned_register_; }
  void set_assigned_register(int reg) { assigned_register_ = reg; }

 private:
  ZoneVector<LiveRange*> ranges_;
  int assigned_register_;
  DISALLOW_COPY_AND_ASSIGN(LiveRangeGroup);
};


class TopLevelLiveRange final : public LiveRange {
 public:
  explicit TopLevelLiveRange(int vreg, MachineRepresentation rep);
  int spill_start_index() const { return spill_start_index_; }

  bool IsFixed() const { return vreg_ < 0; }

  bool is_phi() const { return IsPhiField::decode(bits_); }
  void set_is_phi(bool value) { bits_ = IsPhiField::update(bits_, value); }

  bool is_non_loop_phi() const { return IsNonLoopPhiField::decode(bits_); }
  void set_is_non_loop_phi(bool value) {
    bits_ = IsNonLoopPhiField::update(bits_, value);
  }

  bool has_slot_use() const { return HasSlotUseField::decode(bits_); }
  void set_has_slot_use(bool value) {
    bits_ = HasSlotUseField::update(bits_, value);
  }

  // Add a new interval or a new use position to this live range.
  void EnsureInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUseInterval(LifetimePosition start, LifetimePosition end, Zone* zone);
  void AddUsePosition(UsePosition* pos);

  // Shorten the most recently added interval by setting a new start.
  void ShortenTo(LifetimePosition start);

  // Detaches the region between start and end and attributes it to this
  // range's splinter (see SetSplinter below). The current range is pointed to
  // as "splintered_from". No parent/child relationship is established between
  // this range and the splinter.
  void Splinter(LifetimePosition start, LifetimePosition end, Zone* zone);

  // Assuming other was splintered from this range, embeds other and its
  // children as part of the children sequence of this range.
  void Merge(TopLevelLiveRange* other, Zone* zone);

  // Spill range management.
  void SetSpillRange(SpillRange* spill_range);
  enum class SpillType { kNoSpillType, kSpillOperand, kSpillRange };
  void set_spill_type(SpillType value) {
    bits_ = SpillTypeField::update(bits_, value);
  }
  SpillType spill_type() const { return SpillTypeField::decode(bits_); }
  InstructionOperand* GetSpillOperand() const {
    DCHECK(spill_type() == SpillType::kSpillOperand);
    return spill_operand_;
  }

  SpillRange* GetAllocatedSpillRange() const {
    DCHECK(spill_type() != SpillType::kSpillOperand);
    return spill_range_;
  }

  SpillRange* GetSpillRange() const {
    DCHECK(spill_type() == SpillType::kSpillRange);
    return spill_range_;
  }
  bool HasNoSpillType() const {
    return spill_type() == SpillType::kNoSpillType;
  }
  bool HasSpillOperand() const {
    return spill_type() == SpillType::kSpillOperand;
  }
  bool HasSpillRange() const { return spill_type() == SpillType::kSpillRange; }

  AllocatedOperand GetSpillRangeOperand() const;

  void RecordSpillLocation(Zone* zone, int gap_index,
                           InstructionOperand* operand);
  void SetSpillOperand(InstructionOperand* operand);
  void SetSpillStartIndex(int start) {
    spill_start_index_ = Min(start, spill_start_index_);
  }

  void CommitSpillMoves(InstructionSequence* sequence,
                        const InstructionOperand& operand,
                        bool might_be_duplicated);

  // If all the children of this range are spilled in deferred blocks, and if
  // every non-spilled child with a use position requiring a slot is itself
  // contained in a deferred block, mark the range as
  // IsSpilledOnlyInDeferredBlocks, so that we avoid spilling at definition
  // and instead let the LiveRangeConnector perform the spills within the
  // deferred blocks. If so, we insert here spills for non-spilled ranges
  // with slot use positions.
  void MarkSpilledInDeferredBlock() {
    spill_start_index_ = -1;
    spilled_in_deferred_blocks_ = true;
    spill_move_insertion_locations_ = nullptr;
  }

  bool TryCommitSpillInDeferredBlock(InstructionSequence* code,
                                     const InstructionOperand& spill_operand);

  TopLevelLiveRange* splintered_from() const { return splintered_from_; }
  bool IsSplinter() const { return splintered_from_ != nullptr; }
  bool MayRequireSpillRange() const {
    DCHECK(!IsSplinter());
    return !HasSpillOperand() && spill_range_ == nullptr;
  }
  void UpdateSpillRangePostMerge(TopLevelLiveRange* merged);
  int vreg() const { return vreg_; }

#if DEBUG
  int debug_virt_reg() const;
#endif

  void Verify() const;
  void VerifyChildrenInOrder() const;

  int GetNextChildId() {
    return IsSplinter() ? splintered_from()->GetNextChildId()
                        : ++last_child_id_;
  }

  int GetChildCount() const { return last_child_id_ + 1; }

  bool IsSpilledOnlyInDeferredBlocks() const {
    return spilled_in_deferred_blocks_;
  }

  struct SpillMoveInsertionList;

  SpillMoveInsertionList* spill_move_insertion_locations() const {
    return spill_move_insertion_locations_;
  }
  TopLevelLiveRange* splinter() const { return splinter_; }
  void SetSplinter(TopLevelLiveRange* splinter) {
    DCHECK_NULL(splinter_);
    DCHECK_NOT_NULL(splinter);

    splinter_ = splinter;
    splinter->relative_id_ = GetNextChildId();
    splinter->set_spill_type(spill_type());
    splinter->SetSplinteredFrom(this);
  }

  void MarkHasPreassignedSlot() { has_preassigned_slot_ = true; }
  bool has_preassigned_slot() const { return has_preassigned_slot_; }

 private:
  void SetSplinteredFrom(TopLevelLiveRange* splinter_parent);

  typedef BitField<bool, 1, 1> HasSlotUseField;
  typedef BitField<bool, 2, 1> IsPhiField;
  typedef BitField<bool, 3, 1> IsNonLoopPhiField;
  typedef BitField<SpillType, 4, 2> SpillTypeField;

  int vreg_;
  int last_child_id_;
  TopLevelLiveRange* splintered_from_;
  union {
    // Which member is valid is determined by spill_type().
    InstructionOperand* spill_operand_;
    SpillRange* spill_range_;
  };
  SpillMoveInsertionList* spill_move_insertion_locations_;
  // TODO(mtrofin): generalize spilling after definition, currently specialized
  // just for spill in a single deferred block.
  bool spilled_in_deferred_blocks_;
  int spill_start_index_;
  UsePosition* last_pos_;
  TopLevelLiveRange* splinter_;
  bool has_preassigned_slot_;

  DISALLOW_COPY_AND_ASSIGN(TopLevelLiveRange);
};
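

// Spill-state sketch, summarizing the SpillType machinery above (no new API):
//
//   kNoSpillType --SetSpillOperand--> kSpillOperand  (spill_operand_ valid)
//   kNoSpillType --SetSpillRange----> kSpillRange    (spill_range_ valid)
//
// Because spill_operand_ and spill_range_ share a union, the accessors
// GetSpillOperand()/GetSpillRange() DCHECK that spill_type() matches.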


struct PrintableLiveRange {
  const RegisterConfiguration* register_configuration_;
  const LiveRange* range_;
};


std::ostream& operator<<(std::ostream& os,
                         const PrintableLiveRange& printable_range);


class SpillRange final : public ZoneObject {
 public:
  static const int kUnassignedSlot = -1;
  SpillRange(TopLevelLiveRange* range, Zone* zone);

  UseInterval* interval() const { return use_interval_; }
  // Currently, only 4 or 8 byte slots are supported.
  int ByteWidth() const;
  bool IsEmpty() const { return live_ranges_.empty(); }
  bool TryMerge(SpillRange* other);
  bool HasSlot() const { return assigned_slot_ != kUnassignedSlot; }

  void set_assigned_slot(int index) {
    DCHECK_EQ(kUnassignedSlot, assigned_slot_);
    assigned_slot_ = index;
  }
  int assigned_slot() {
    DCHECK_NE(kUnassignedSlot, assigned_slot_);
    return assigned_slot_;
  }
  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  int byte_width() const { return byte_width_; }
  RegisterKind kind() const { return kind_; }
  void Print() const;

 private:
  LifetimePosition End() const { return end_position_; }
  bool IsIntersectingWith(SpillRange* other) const;
  // Merges the intervals, making sure the resulting use intervals are sorted.
  void MergeDisjointIntervals(UseInterval* other);

  ZoneVector<TopLevelLiveRange*> live_ranges_;
  UseInterval* use_interval_;
  LifetimePosition end_position_;
  int assigned_slot_;
  int byte_width_;
  RegisterKind kind_;

  DISALLOW_COPY_AND_ASSIGN(SpillRange);
};


class RegisterAllocationData final : public ZoneObject {
 public:
  class PhiMapValue : public ZoneObject {
   public:
    PhiMapValue(PhiInstruction* phi, const InstructionBlock* block, Zone* zone);

    const PhiInstruction* phi() const { return phi_; }
    const InstructionBlock* block() const { return block_; }

    // For hinting.
    int assigned_register() const { return assigned_register_; }
    void set_assigned_register(int register_code) {
      DCHECK_EQ(assigned_register_, kUnassignedRegister);
      assigned_register_ = register_code;
    }
    void UnsetAssignedRegister() { assigned_register_ = kUnassignedRegister; }

    void AddOperand(InstructionOperand* operand);
    void CommitAssignment(const InstructionOperand& operand);

   private:
    PhiInstruction* const phi_;
    const InstructionBlock* const block_;
    ZoneVector<InstructionOperand*> incoming_operands_;
    int assigned_register_;
  };
  typedef ZoneMap<int, PhiMapValue*> PhiMap;

  struct DelayedReference {
    ReferenceMap* map;
    InstructionOperand* operand;
  };
  typedef ZoneVector<DelayedReference> DelayedReferences;
  typedef ZoneVector<std::pair<TopLevelLiveRange*, int>>
      RangesWithPreassignedSlots;

  RegisterAllocationData(const RegisterConfiguration* config,
                         Zone* allocation_zone, Frame* frame,
                         InstructionSequence* code,
                         const char* debug_name = nullptr);

  const ZoneVector<TopLevelLiveRange*>& live_ranges() const {
    return live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& live_ranges() { return live_ranges_; }
  const ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() const {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_live_ranges() {
    return fixed_live_ranges_;
  }
  ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() {
    return fixed_double_live_ranges_;
  }
  const ZoneVector<TopLevelLiveRange*>& fixed_double_live_ranges() const {
    return fixed_double_live_ranges_;
  }
  ZoneVector<BitVector*>& live_in_sets() { return live_in_sets_; }
  ZoneVector<BitVector*>& live_out_sets() { return live_out_sets_; }
  ZoneVector<SpillRange*>& spill_ranges() { return spill_ranges_; }
  DelayedReferences& delayed_references() { return delayed_references_; }
  InstructionSequence* code() const { return code_; }
  // This zone is for data structures only needed during the register
  // allocation phases.
  Zone* allocation_zone() const { return allocation_zone_; }
  // This zone is for InstructionOperands and moves that live beyond register
  // allocation.
  Zone* code_zone() const { return code()->zone(); }
  Frame* frame() const { return frame_; }
  const char* debug_name() const { return debug_name_; }
  const RegisterConfiguration* config() const { return config_; }

  MachineRepresentation RepresentationFor(int virtual_register);

  TopLevelLiveRange* GetOrCreateLiveRangeFor(int index);
  // Creates a new live range.
  TopLevelLiveRange* NewLiveRange(int index, MachineRepresentation rep);
  TopLevelLiveRange* NextLiveRange(MachineRepresentation rep);

  SpillRange* AssignSpillRangeToLiveRange(TopLevelLiveRange* range);
  SpillRange* CreateSpillRangeForLiveRange(TopLevelLiveRange* range);

  MoveOperands* AddGapMove(int index, Instruction::GapPosition position,
                           const InstructionOperand& from,
                           const InstructionOperand& to);

  bool IsReference(TopLevelLiveRange* top_range) const {
    return code()->IsReference(top_range->vreg());
  }

  bool ExistsUseWithoutDefinition();
  bool RangesDefinedInDeferredStayInDeferred();

  void MarkAllocated(RegisterKind kind, int index);

  PhiMapValue* InitializePhiMap(const InstructionBlock* block,
                                PhiInstruction* phi);
  PhiMapValue* GetPhiMapValueFor(TopLevelLiveRange* top_range);
  PhiMapValue* GetPhiMapValueFor(int virtual_register);
  bool IsBlockBoundary(LifetimePosition pos) const;

  RangesWithPreassignedSlots& preassigned_slot_ranges() {
    return preassigned_slot_ranges_;
  }

 private:
  int GetNextLiveRangeId();

  Zone* const allocation_zone_;
  Frame* const frame_;
  InstructionSequence* const code_;
  const char* const debug_name_;
  const RegisterConfiguration* const config_;
  PhiMap phi_map_;
  ZoneVector<int> allocatable_codes_;
  ZoneVector<int> allocatable_double_codes_;
  ZoneVector<BitVector*> live_in_sets_;
  ZoneVector<BitVector*> live_out_sets_;
  ZoneVector<TopLevelLiveRange*> live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_live_ranges_;
  ZoneVector<TopLevelLiveRange*> fixed_double_live_ranges_;
  ZoneVector<SpillRange*> spill_ranges_;
  DelayedReferences delayed_references_;
  BitVector* assigned_registers_;
  BitVector* assigned_double_registers_;
  int virtual_register_count_;
  RangesWithPreassignedSlots preassigned_slot_ranges_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocationData);
};
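

// Overview of the allocation pipeline. Each phase below is implemented by one
// of the classes that follow; the phase numbers repeat the ones given in
// their declarations:
//
//   1. ConstraintBuilder::MeetRegisterConstraints
//   2. ConstraintBuilder::ResolvePhis
//   3. LiveRangeBuilder::BuildLiveRanges
//   4. LinearScanAllocator::AllocateRegisters
//   5. OperandAssigner::AssignSpillSlots
//   6. OperandAssigner::CommitAssignment
//   7. ReferenceMapPopulator::PopulateReferenceMaps
//   8. LiveRangeConnector::ConnectRanges
//   9. LiveRangeConnector::ResolveControlFlow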


class ConstraintBuilder final : public ZoneObject {
 public:
  explicit ConstraintBuilder(RegisterAllocationData* data);

  // Phase 1: insert moves to account for fixed register operands.
  void MeetRegisterConstraints();

  // Phase 2: deconstruct SSA by inserting moves in successors and the headers
  // of blocks containing phis.
  void ResolvePhis();

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }

  InstructionOperand* AllocateFixed(UnallocatedOperand* operand, int pos,
                                    bool is_tagged);
  void MeetRegisterConstraints(const InstructionBlock* block);
  void MeetConstraintsBefore(int index);
  void MeetConstraintsAfter(int index);
  void MeetRegisterConstraintsForLastInstructionInBlock(
      const InstructionBlock* block);
  void ResolvePhis(const InstructionBlock* block);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ConstraintBuilder);
};


class LiveRangeBuilder final : public ZoneObject {
 public:
  explicit LiveRangeBuilder(RegisterAllocationData* data, Zone* local_zone);

  // Phase 3: compute the liveness of all virtual registers.
  void BuildLiveRanges();
  static BitVector* ComputeLiveOut(const InstructionBlock* block,
                                   RegisterAllocationData* data);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* allocation_zone() const { return data()->allocation_zone(); }
  Zone* code_zone() const { return code()->zone(); }
  const RegisterConfiguration* config() const { return data()->config(); }
  ZoneVector<BitVector*>& live_in_sets() const {
    return data()->live_in_sets();
  }

  void Verify() const;

  // Liveness analysis support.
  void AddInitialIntervals(const InstructionBlock* block, BitVector* live_out);
  void ProcessInstructions(const InstructionBlock* block, BitVector* live);
  void ProcessPhis(const InstructionBlock* block, BitVector* live);
  void ProcessLoopHeader(const InstructionBlock* block, BitVector* live);

  static int FixedLiveRangeID(int index) { return -index - 1; }
  int FixedDoubleLiveRangeID(int index);
  TopLevelLiveRange* FixedLiveRangeFor(int index);
  TopLevelLiveRange* FixedDoubleLiveRangeFor(int index);

  void MapPhiHint(InstructionOperand* operand, UsePosition* use_pos);
  void ResolvePhiHint(InstructionOperand* operand, UsePosition* use_pos);

  UsePosition* NewUsePosition(LifetimePosition pos, InstructionOperand* operand,
                              void* hint, UsePositionHintType hint_type);
  UsePosition* NewUsePosition(LifetimePosition pos) {
    return NewUsePosition(pos, nullptr, nullptr, UsePositionHintType::kNone);
  }
  TopLevelLiveRange* LiveRangeFor(InstructionOperand* operand);
  // Helper methods for building intervals.
  UsePosition* Define(LifetimePosition position, InstructionOperand* operand,
                      void* hint, UsePositionHintType hint_type);
  void Define(LifetimePosition position, InstructionOperand* operand) {
    Define(position, operand, nullptr, UsePositionHintType::kNone);
  }
  UsePosition* Use(LifetimePosition block_start, LifetimePosition position,
                   InstructionOperand* operand, void* hint,
                   UsePositionHintType hint_type);
  void Use(LifetimePosition block_start, LifetimePosition position,
           InstructionOperand* operand) {
    Use(block_start, position, operand, nullptr, UsePositionHintType::kNone);
  }

  RegisterAllocationData* const data_;
  ZoneMap<InstructionOperand*, UsePosition*> phi_hints_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeBuilder);
};
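

// Liveness sketch: BuildLiveRanges is a standard backward dataflow over the
// instruction blocks (a summary of ComputeLiveOut and ProcessInstructions
// above, not new API):
//
//   live_out(B) = union of live_in(S) over all successors S of B, plus the
//                 phi inputs in successors that flow in along the edge B->S
//   live_in(B)  = (live_out(B) - defs(B)) + uses(B), computed by walking B's
//                 instructions backwards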


class RegisterAllocator : public ZoneObject {
 public:
  explicit RegisterAllocator(RegisterAllocationData* data, RegisterKind kind);

 protected:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  RegisterKind mode() const { return mode_; }
  int num_registers() const { return num_registers_; }
  int num_allocatable_registers() const { return num_allocatable_registers_; }
  int allocatable_register_code(int allocatable_index) const {
    return allocatable_register_codes_[allocatable_index];
  }

  // TODO(mtrofin): explain why splitting at a gap START is always OK.
  LifetimePosition GetSplitPositionForInstruction(const LiveRange* range,
                                                  int instruction_index);

  Zone* allocation_zone() const { return data()->allocation_zone(); }

  // Find the optimal split for ranges defined by a memory operand, e.g.
  // constants or function parameters passed on the stack.
  void SplitAndSpillRangesDefinedByMemoryOperand(bool operands_only);

  // Split the given range at the given position.
  // If the range starts at or after the given position, the original range
  // is returned. Otherwise returns the live range that starts at pos and
  // contains all uses from the original range that follow pos. Uses at pos
  // will still be owned by the original range after splitting.
  LiveRange* SplitRangeAt(LiveRange* range, LifetimePosition pos);

  bool CanProcessRange(LiveRange* range) const {
    return range != nullptr && !range->IsEmpty() && range->kind() == mode();
  }

  // Split the given range at a position from the interval [start, end].
  LiveRange* SplitBetween(LiveRange* range, LifetimePosition start,
                          LifetimePosition end);

  // Find a lifetime position in the interval [start, end] which
  // is optimal for splitting: it is either the header of the outermost
  // loop covered by this interval or the latest possible position.
  LifetimePosition FindOptimalSplitPos(LifetimePosition start,
                                       LifetimePosition end);

  void Spill(LiveRange* range);

  // If we are trying to spill a range inside a loop, try to hoist the spill
  // position out to the point just before the loop.
  LifetimePosition FindOptimalSpillingPos(LiveRange* range,
                                          LifetimePosition pos);

  const ZoneVector<TopLevelLiveRange*>& GetFixedRegisters() const;
  const char* RegisterName(int allocation_index) const;

 private:
  RegisterAllocationData* const data_;
  const RegisterKind mode_;
  const int num_registers_;
  int num_allocatable_registers_;
  const int* allocatable_register_codes_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocator);
};
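

// Split-position sketch (hypothetical block layout, restating the
// FindOptimalSplitPos contract above): if [start, end] stretches from before
// a loop into its body, the chosen position is the loop header rather than
// end, so the split part of the range can stay out of the loop:
//
//   B1 (start here) -> B2 (loop header) -> B3 (end here, inside the loop)
//   FindOptimalSplitPos(start, end)  ==  the position at B2's start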


class LinearScanAllocator final : public RegisterAllocator {
 public:
  LinearScanAllocator(RegisterAllocationData* data, RegisterKind kind,
                      Zone* local_zone);

  // Phase 4: compute register assignments.
  void AllocateRegisters();

 private:
  ZoneVector<LiveRange*>& unhandled_live_ranges() {
    return unhandled_live_ranges_;
  }
  ZoneVector<LiveRange*>& active_live_ranges() { return active_live_ranges_; }
  ZoneVector<LiveRange*>& inactive_live_ranges() {
    return inactive_live_ranges_;
  }

  void SetLiveRangeAssignedRegister(LiveRange* range, int reg);

  // Helper methods for updating the live range lists.
  void AddToActive(LiveRange* range);
  void AddToInactive(LiveRange* range);
  void AddToUnhandledSorted(LiveRange* range);
  void AddToUnhandledUnsorted(LiveRange* range);
  void SortUnhandled();
  bool UnhandledIsSorted();
  void ActiveToHandled(LiveRange* range);
  void ActiveToInactive(LiveRange* range);
  void InactiveToHandled(LiveRange* range);
  void InactiveToActive(LiveRange* range);

  // Helper methods for allocating registers.
  bool TryReuseSpillForPhi(TopLevelLiveRange* range);
  bool TryAllocateFreeReg(LiveRange* range);
  void AllocateBlockedReg(LiveRange* range);

  // Spill the given live range after position pos.
  void SpillAfter(LiveRange* range, LifetimePosition pos);

  // Spill the given live range after position [start] and up to position
  // [end].
  void SpillBetween(LiveRange* range, LifetimePosition start,
                    LifetimePosition end);

  // Spill the given live range after position [start] and up to position
  // [end]. The range is guaranteed to be spilled at least until position
  // [until].
  void SpillBetweenUntil(LiveRange* range, LifetimePosition start,
                         LifetimePosition until, LifetimePosition end);

  void SplitAndSpillIntersecting(LiveRange* range);

  ZoneVector<LiveRange*> unhandled_live_ranges_;
  ZoneVector<LiveRange*> active_live_ranges_;
  ZoneVector<LiveRange*> inactive_live_ranges_;

#ifdef DEBUG
  LifetimePosition allocation_finger_;
#endif

  DISALLOW_COPY_AND_ASSIGN(LinearScanAllocator);
};
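

// Linear-scan sketch (simplified; the authoritative loop is AllocateRegisters,
// this only summarizes how the helpers above cooperate):
//
//   SortUnhandled();  // unhandled ranges ordered by start position
//   while (!unhandled_live_ranges().empty()) {
//     LiveRange* current = /* pop the range with the lowest start */;
//     // Retire or park ranges relative to current->Start():
//     //   expired active ranges        -> ActiveToHandled
//     //   active ranges with a gap     -> ActiveToInactive
//     //   expired inactive ranges      -> InactiveToHandled
//     //   inactive ranges covering it  -> InactiveToActive
//     if (!TryAllocateFreeReg(current)) AllocateBlockedReg(current);
//     if (current->HasRegisterAssigned()) AddToActive(current);
//   }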


class SpillSlotLocator final : public ZoneObject {
 public:
  explicit SpillSlotLocator(RegisterAllocationData* data);

  void LocateSpillSlots();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(SpillSlotLocator);
};


class OperandAssigner final : public ZoneObject {
 public:
  explicit OperandAssigner(RegisterAllocationData* data);

  // Phase 5: assign spill slots.
  void AssignSpillSlots();

  // Phase 6: commit assignment.
  void CommitAssignment();

 private:
  RegisterAllocationData* data() const { return data_; }

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(OperandAssigner);
};


class ReferenceMapPopulator final : public ZoneObject {
 public:
  explicit ReferenceMapPopulator(RegisterAllocationData* data);

  // Phase 7: compute values for pointer maps.
  void PopulateReferenceMaps();

 private:
  RegisterAllocationData* data() const { return data_; }

  bool SafePointsAreInOrder() const;

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(ReferenceMapPopulator);
};


// Insert moves of the form
//
//          Operand(child_(k+1)) = Operand(child_k)
//
// where child_k and child_(k+1) are consecutive children of a range (so
// child_k->next() == child_(k+1)), and Operand(...) refers to the
// assigned operand, be it a register or a slot.
class LiveRangeConnector final : public ZoneObject {
 public:
  explicit LiveRangeConnector(RegisterAllocationData* data);

  // Phase 8: reconnect split ranges with moves, when the control flow
  // between the ranges is trivial (no branches).
  void ConnectRanges(Zone* local_zone);

  // Phase 9: insert moves to connect ranges across basic blocks, when the
  // control flow between them cannot be trivially resolved, such as joining
  // branches.
  void ResolveControlFlow(Zone* local_zone);

 private:
  RegisterAllocationData* data() const { return data_; }
  InstructionSequence* code() const { return data()->code(); }
  Zone* code_zone() const { return code()->zone(); }

  bool CanEagerlyResolveControlFlow(const InstructionBlock* block) const;

  int ResolveControlFlow(const InstructionBlock* block,
                         const InstructionOperand& cur_op,
                         const InstructionBlock* pred,
                         const InstructionOperand& pred_op);

  RegisterAllocationData* const data_;

  DISALLOW_COPY_AND_ASSIGN(LiveRangeConnector);
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_REGISTER_ALLOCATOR_H_
1170