// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COALESCED_LIVE_RANGES_H_
#define V8_COALESCED_LIVE_RANGES_H_

#include "src/compiler/register-allocator.h"
#include "src/zone-containers.h"

namespace v8 {
namespace internal {
namespace compiler {


// Implementation detail for CoalescedLiveRanges.
struct AllocatedInterval {
  AllocatedInterval(LifetimePosition start, LifetimePosition end,
                    LiveRange* range)
      : start_(start), end_(end), range_(range) {}

  LifetimePosition start_;
  LifetimePosition end_;
  LiveRange* range_;
  bool operator<(const AllocatedInterval& other) const {
    return start_ < other.start_;
  }
  bool operator>(const AllocatedInterval& other) const {
    return start_ > other.start_;
  }
};
// Stores AllocatedIntervals ordered by start position (see operator< above).
typedef ZoneSet<AllocatedInterval> IntervalStore;


// An iterator over the conflicts of a live range, obtained from
// CoalescedLiveRanges. The design supports two main scenarios (see
// GreedyAllocator):
// (1) observing each conflicting range without mutating the allocations; and
// (2) observing each conflicting range, and then moving to the next, after
//     removing the current conflict.
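//
// A minimal usage sketch for scenario (1), assuming a CoalescedLiveRanges
// instance `allocations` and a candidate live range `range` (both
// hypothetical names):
//
//   LiveRangeConflictIterator it = allocations->GetConflicts(range);
//   for (LiveRange* conflict = it.Current(); conflict != nullptr;
//        conflict = it.GetNext()) {
//     // Observe `conflict`; the same range may reappear, non-consecutively.
//   }
//
// For scenario (2), use it.RemoveCurrentAndGetNext() instead of it.GetNext(),
// which evicts the current conflict before advancing.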
class LiveRangeConflictIterator {
 public:
  // The current conflict, or nullptr if there are no conflicts or we have
  // reached the end of the conflicts.
  LiveRange* Current() const;

  // Get the next conflict. The caller should handle non-consecutive
  // repetitions of the same range.
  LiveRange* GetNext() { return InternalGetNext(false); }

  // Get the next conflict, after evicting the current one. The caller can
  // expect never to observe the same live range more than once.
  LiveRange* RemoveCurrentAndGetNext() { return InternalGetNext(true); }

 private:
  friend class CoalescedLiveRanges;

  typedef IntervalStore::const_iterator interval_iterator;
  LiveRangeConflictIterator(const LiveRange* range, IntervalStore* store);

  // Move the store iterator to the first interval intersecting the query.
  // Since the intervals are sorted, subsequent intervals intersecting the
  // query follow it. May leave the store iterator at "end", meaning that the
  // current query does not have an intersection.
  void MovePosToFirstConflictForQuery();

  // Move both the query and the store iterator to the first intersection, if
  // any. If there is none, invalidate the iterator (IsFinished() == true).
  void MovePosAndQueryToFirstConflict();

  // Increment pos and skip over intervals belonging to the same range we
  // started with (i.e. Current() before the call). It is possible that the
  // range will be seen again, but not consecutively.
  void IncrementPosAndSkipOverRepetitions();

  // Common implementation used by both GetNext and RemoveCurrentAndGetNext.
  LiveRange* InternalGetNext(bool clean_behind);

  bool IsFinished() const { return query_ == nullptr; }

  static AllocatedInterval AsAllocatedInterval(LifetimePosition pos) {
    return AllocatedInterval(pos, LifetimePosition::Invalid(), nullptr);
  }

  // Intersection utilities.
  static bool Intersects(LifetimePosition a_start, LifetimePosition a_end,
                         LifetimePosition b_start, LifetimePosition b_end) {
    return a_start < b_end && b_start < a_end;
  }
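
  // Worked example for Intersects above, treating intervals as half-open
  // (hypothetical positions): [2, 6) and [6, 9) merely touch, so Intersects
  // returns false (6 < 6 fails), while [2, 6) and [5, 9) overlap at [5, 6),
  // so it returns true.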

  bool QueryIntersectsAllocatedInterval() const {
    DCHECK_NOT_NULL(query_);
    return pos_ != intervals_->end() &&
           Intersects(query_->start(), query_->end(), pos_->start_, pos_->end_);
  }

  void Invalidate() {
    query_ = nullptr;
    pos_ = intervals_->end();
  }

  const UseInterval* query_;
  interval_iterator pos_;
  IntervalStore* intervals_;
};

// Collection of live ranges allocated to the same register.
// It supports efficiently finding all conflicts for a given, non-allocated
// range. See AllocatedInterval.
// Allocated live ranges do not intersect; at most, individual use intervals
// touch. We store, for each live range, an AllocatedInterval corresponding to
// each of that range's UseIntervals, and we keep the list of
// AllocatedIntervals sorted by start positions. Then, given the
// non-intersecting property, consecutive AllocatedIntervals have the property
// that the "smaller"'s end is less than or equal to the "larger"'s start.
// This allows for quick (logarithmic complexity) identification of the first
// AllocatedInterval to conflict with a given LiveRange, and then for efficient
// traversal of conflicts.
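//
// For example (hypothetical positions), suppose the store holds
//   [0, 4) [4, 10) [12, 20)
// and we query with a use interval [8, 14). One lookup consistent with the
// sorted storage is: binary-search for the last interval starting at or
// before 8 (here [4, 10)), then walk forward while intervals still intersect
// the query; this reports the conflicts [4, 10) and [12, 20).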
class CoalescedLiveRanges : public ZoneObject {
 public:
  explicit CoalescedLiveRanges(Zone* zone) : intervals_(zone) {}
  void clear() { intervals_.clear(); }

  bool empty() const { return intervals_.empty(); }

  // Iterate over each live range conflicting with the provided one.
  // The same live range may be observed multiple, non-consecutive times.
  LiveRangeConflictIterator GetConflicts(const LiveRange* range);

  // Allocates a range with a pre-calculated candidate weight.
  void AllocateRange(LiveRange* range);

  // Unit testing API, verifying that allocated intervals do not overlap.
  bool VerifyAllocationsAreValidForTesting() const;

 private:
  static const float kAllocatedRangeMultiplier;

  IntervalStore& intervals() { return intervals_; }
  const IntervalStore& intervals() const { return intervals_; }

  // Augment the weight of a range that is about to be allocated.
  static void UpdateWeightAtAllocation(LiveRange* range);

  // Reduce the weight of a range that has lost allocation.
  static void UpdateWeightAtEviction(LiveRange* range);
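
  // One plausible scheme, not necessarily the actual policy: allocation
  // scales the weight up by kAllocatedRangeMultiplier, and eviction scales it
  // back down, making ranges that already hold a register costlier to
  // displace.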

  IntervalStore intervals_;
  DISALLOW_COPY_AND_ASSIGN(CoalescedLiveRanges);
};


}  // namespace compiler
}  // namespace internal
}  // namespace v8
#endif  // V8_COALESCED_LIVE_RANGES_H_