Lines Matching refs:interval

96 static bool ShouldProcess(bool processing_core_registers, LiveInterval* interval) {
97 if (interval == nullptr) return false;
98 bool is_core_register = (interval->GetType() != Primitive::kPrimDouble)
99 && (interval->GetType() != Primitive::kPrimFloat);
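For orientation, here is a minimal self-contained sketch of the same core-vs-floating-point test. The Type enum and Interval struct below are simplified stand-ins for Primitive::Type and LiveInterval, not the ART classes.

enum class Type { kInt, kLong, kReference, kFloat, kDouble };  // Simplified stand-in.
struct Interval { Type type; };                                // Simplified stand-in.

// An interval is handled in the current pass only when its register kind
// (core or floating-point) matches the kind the pass is allocating.
static bool ShouldProcess(bool processing_core_registers, const Interval* interval) {
  if (interval == nullptr) return false;
  const bool is_core_register =
      (interval->type != Type::kDouble) && (interval->type != Type::kFloat);
  return processing_core_registers == is_core_register;
}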
137 LiveInterval* interval = location.IsRegister()
143 if (interval == nullptr) {
144 interval = LiveInterval::MakeFixedInterval(allocator_, reg, type);
146 physical_core_register_intervals_[reg] = interval;
148 physical_fp_register_intervals_[reg] = interval;
151 DCHECK(interval->GetRegister() == reg);
152 interval->AddRange(start, end);
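A hedged sketch of the bookkeeping the fragment above performs: one fixed interval per physical register, created lazily and then extended with another blocked range on each call. FixedInterval and this BlockRegister are illustrative names over simplified types, not the ART declarations.

#include <cstddef>
#include <utility>
#include <vector>

// A fixed interval in this sketch is just a register number plus the ranges
// during which that physical register is unavailable to the allocator.
struct FixedInterval {
  int reg;
  std::vector<std::pair<size_t, size_t>> ranges;  // [start, end) pairs.
  void AddRange(size_t start, size_t end) { ranges.push_back({start, end}); }
};

// One slot per physical register; nullptr until the register is first blocked.
static std::vector<FixedInterval*> physical_register_intervals(16, nullptr);

static void BlockRegister(int reg, size_t start, size_t end) {
  FixedInterval* interval = physical_register_intervals[reg];
  if (interval == nullptr) {
    interval = new FixedInterval{reg, {}};
    physical_register_intervals[reg] = interval;
  }
  // Repeated calls keep extending the same per-register fixed interval.
  interval->AddRange(start, end);
}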
169 // Iterate post-order, to ensure the list is sorted, and the last added interval
198 // Fixed interval is added to inactive_ instead of unhandled_.
199 // It's also the only type of inactive interval whose start position
200 // can be after the current interval during linear scan.
201 // Fixed interval is never split and never moves to unhandled_.
218 // Fixed interval is added to inactive_ instead of unhandled_.
219 // It's also the only type of inactive interval whose start position
220 // can be after the current interval during linear scan.
221 // Fixed interval is never split and never moves to unhandled_.
245 LiveInterval* interval =
247 temp_intervals_.push_back(interval);
248 interval->AddTempUse(instruction, i);
249 unhandled_core_intervals_.push_back(interval);
254 LiveInterval* interval =
256 temp_intervals_.push_back(interval);
257 interval->AddTempUse(instruction, i);
259 interval->AddHighInterval(/* is_temp */ true);
260 LiveInterval* high = interval->GetHighInterval();
264 unhandled_fp_intervals_.push_back(interval);
295 // By adding the following interval to the algorithm, we can compute this

297 LiveInterval* interval = LiveInterval::MakeSlowPathInterval(allocator_, instruction);
298 interval->AddRange(position, position + 1);
299 AddSorted(&unhandled_core_intervals_, interval);
300 AddSorted(&unhandled_fp_intervals_, interval);
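Why a one-position marker helps, as a sketch: by treating the safepoint as a tiny interval [position, position + 1), the ordinary linear scan bookkeeping can tell how many registers are live across it. The AllocatedInterval struct and LiveRegistersAt helper below are assumptions made for the example, not allocator API.

#include <algorithm>
#include <cstddef>
#include <vector>

// An already-allocated interval, reduced to what the example needs.
struct AllocatedInterval {
  size_t start, end;   // Live range [start, end).
  bool has_register;
};

// Counts the registers live across a safepoint position. In the allocator the
// same information falls out of processing the marker interval above, because
// every interval still active at that position holds a register.
static size_t LiveRegistersAt(const std::vector<AllocatedInterval>& intervals,
                              size_t safepoint_position) {
  return static_cast<size_t>(std::count_if(
      intervals.begin(), intervals.end(),
      [safepoint_position](const AllocatedInterval& i) {
        return i.has_register && i.start <= safepoint_position && safepoint_position < i.end;
      }));
}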
348 // Hole in the interval.
357 // given register, we create an interval that covers these locations. The register
359 // interval.
376 // Shift the interval's start by one to account for the blocked register.
398 // If needed, add interval to the list of unhandled intervals.
405 // of this new interval might be after intervals already in the list.
420 explicit AllRangesIterator(LiveInterval* interval)
421 : current_interval_(interval),
422 current_range_(interval->GetFirstRange()) {}
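A minimal sketch of such an iterator, assuming an interval whose ranges form a singly linked list and whose split siblings are chained together (Range and Interval here are simplified, not the ART classes): when the current sibling's ranges run out, it moves on to the next sibling.

#include <cstddef>

struct Range { size_t start, end; Range* next; };                 // [start, end) ranges.
struct Interval { Range* first_range; Interval* next_sibling; };

// Walks every range of an interval and of all of its split siblings.
class AllRangesIterator {
 public:
  explicit AllRangesIterator(Interval* interval)
      : current_interval_(interval),
        current_range_(interval->first_range) {}

  bool Done() const { return current_interval_ == nullptr; }
  Range* CurrentRange() const { return current_range_; }

  void Advance() {
    current_range_ = current_range_->next;
    // Exhausted this sibling's ranges: continue with the next sibling, if any.
    while (current_range_ == nullptr && current_interval_ != nullptr) {
      current_interval_ = current_interval_->next_sibling;
      current_range_ =
          (current_interval_ != nullptr) ? current_interval_->first_range : nullptr;
    }
  }

 private:
  Interval* current_interval_;
  Range* current_range_;
};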
496 // Allocate a bit vector per register. A live interval that has a register
567 void RegisterAllocator::DumpInterval(std::ostream& stream, LiveInterval* interval) const {
568 interval->Dump(stream);
570 if (interval->HasRegister()) {
571 if (interval->IsFloatingPoint()) {
572 codegen_->DumpFloatingPointRegister(stream, interval->GetRegister());
574 codegen_->DumpCoreRegister(stream, interval->GetRegister());
606 // (1) Remove the interval with the lowest start position from unhandled.
610 // Make sure the interval is in an expected state.
614 // Make sure a low interval is always with a high.
616 // Make sure a high interval is always with a low.
633 [this, position](LiveInterval* interval) {
634 if (interval->IsDeadAt(position)) {
635 handled_.push_back(interval);
637 } else if (!interval->Covers(position)) {
638 inactive_.push_back(interval);
641 return false; // Keep this interval.
652 [this, position](LiveInterval* interval) {
653 DCHECK(interval->GetStart() < position || interval->IsFixed());
654 if (interval->IsDeadAt(position)) {
655 handled_.push_back(interval);
657 } else if (interval->Covers(position)) {
658 active_.push_back(interval);
661 return false; // Keep this interval.
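The fragments above are the heart of the scan: pop the unhandled interval with the lowest start, then re-classify everything in active_ and inactive_ at that position. A compact, hedged sketch of that bookkeeping over a simplified interval type (Covers and IsDeadAt below are assumptions of the sketch, not the ART signatures):

#include <cstddef>
#include <utility>
#include <vector>

// Just enough of a live interval to drive the bookkeeping.
struct Interval {
  size_t end;                                     // Overall end of the interval.
  std::vector<std::pair<size_t, size_t>> ranges;  // [start, end) live ranges.
  bool Covers(size_t p) const {
    for (const auto& r : ranges) {
      if (r.first <= p && p < r.second) return true;
    }
    return false;
  }
  bool IsDeadAt(size_t p) const { return p >= end; }
};

// One step of the scan, where `position` is the start of the interval that
// was just popped from the unhandled list.
static void UpdateSets(size_t position,
                       std::vector<Interval*>& active,
                       std::vector<Interval*>& inactive,
                       std::vector<Interval*>& handled) {
  // (2) Active intervals that are dead move to handled; those with a
  //     lifetime hole at `position` move to inactive.
  for (auto it = active.begin(); it != active.end();) {
    Interval* interval = *it;
    if (interval->IsDeadAt(position)) {
      handled.push_back(interval);
      it = active.erase(it);
    } else if (!interval->Covers(position)) {
      inactive.push_back(interval);
      it = active.erase(it);
    } else {
      ++it;
    }
  }
  // (3) Inactive intervals that are dead move to handled; those covering
  //     `position` move back to active.
  for (auto it = inactive.begin(); it != inactive.end();) {
    Interval* interval = *it;
    if (interval->IsDeadAt(position)) {
      handled.push_back(interval);
      it = inactive.erase(it);
    } else if (interval->Covers(position)) {
      active.push_back(interval);
      it = inactive.erase(it);
    } else {
      ++it;
    }
  }
}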
667 // Synthesized interval to record the maximum number of live registers
682 // Allocating the low part was unsuccessful. The split interval for the high part
695 // (6) If the interval had a register allocated, add it to the list of active
709 static void FreeIfNotCoverAt(LiveInterval* interval, size_t position, size_t* free_until) {
710 DCHECK(!interval->IsHighInterval());
714 if (interval->IsDeadAt(position)) {
717 free_until[interval->GetRegister()] = kMaxLifetimePosition;
718 if (interval->HasHighInterval()) {
719 DCHECK(interval->GetHighInterval()->IsDeadAt(position));
720 free_until[interval->GetHighInterval()->GetRegister()] = kMaxLifetimePosition;
722 } else if (!interval->CoversSlow(position)) {
723 // The interval becomes inactive at `defined_by`. We make its register
725 free_until[interval->GetRegister()] = interval->FirstUseAfter(position);
726 if (interval->HasHighInterval()) {
727 DCHECK(!interval->GetHighInterval()->CoversSlow(position));
728 free_until[interval->GetHighInterval()->GetRegister()] = free_until[interval->GetRegister()];
743 // For each active interval, set its register to not free.
744 for (LiveInterval* interval : active_) {
745 DCHECK(interval->HasRegister());
746 free_until[interval->GetRegister()] = 0;
749 // An interval that starts an instruction (that is, it is not split), may
757 // Take the last interval of the input. It is the location of that interval
759 LiveInterval* interval = defined_by->InputAt(i)->GetLiveInterval()->GetLastSibling();
760 // Note that the interval may not have been processed yet.
763 && interval->HasRegister()
764 && interval->SameRegisterKind(*current)) {
769 DCHECK(interval->CoversSlow(defined_by->GetLifetimePosition()));
771 FreeIfNotCoverAt(interval, position, free_until);
777 // For each inactive interval, set its register to be free until
780 // Temp/Slow-path-safepoint interval has no holes.
784 // Thanks to SSA, a non-split interval starting in a hole of an
785 // inactive interval should never intersect with that inactive interval.
793 // Already used by some active interval. No need to intersect.
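A hedged sketch of the free-register computation these fragments build up: every register starts out free forever, registers held by active intervals are pinned to 0, and registers held by inactive intervals stay free only until that interval first intersects the current one. The Interval struct and its FirstIntersectionWith helper are simplifications for the example, not the ART API.

#include <algorithm>
#include <cstddef>
#include <vector>

constexpr size_t kMaxLifetimePosition = static_cast<size_t>(-1);

// An interval that already holds a register, reduced to a single live range.
struct Interval {
  int reg;
  size_t start, end;  // Live range [start, end).
  // First position where this interval and `other` are both live, or
  // kMaxLifetimePosition if they never overlap.
  size_t FirstIntersectionWith(const Interval& other) const {
    size_t lo = std::max(start, other.start);
    size_t hi = std::min(end, other.end);
    return (lo < hi) ? lo : kMaxLifetimePosition;
  }
};

// Returns the register that stays free the longest from `current`'s start.
static int PickFreeRegister(const Interval& current,
                            const std::vector<Interval*>& active,
                            const std::vector<Interval*>& inactive,
                            size_t number_of_registers) {
  std::vector<size_t> free_until(number_of_registers, kMaxLifetimePosition);

  // Registers held by active intervals are not free at all.
  for (const Interval* interval : active) {
    free_until[interval->reg] = 0;
  }

  // Registers held by inactive intervals are free until the first intersection
  // with `current`. If already taken by an active interval, skip the test.
  for (const Interval* interval : inactive) {
    if (free_until[interval->reg] == 0) continue;
    size_t intersection = interval->FirstIntersectionWith(current);
    free_until[interval->reg] = std::min(free_until[interval->reg], intersection);
  }

  int best = 0;
  for (size_t reg = 1; reg < number_of_registers; ++reg) {
    if (free_until[reg] > free_until[best]) best = static_cast<int>(reg);
  }
  return best;
}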
835 // If the high register of this interval is not available, we need to spill.
939 // Remove interval and its other half if any. Return iterator to the following element.
943 LiveInterval* interval = *pos;
944 if (interval->IsLowInterval()) {
946 DCHECK_EQ(*(pos + 1), interval->GetHighInterval());
948 } else if (interval->IsHighInterval()) {
950 DCHECK_EQ(*(pos - 1), interval->GetLowInterval());
967 // Split the first interval found that is either:
968 // 1) A non-pair interval.
969 // 2) A pair interval whose high is not low + 1.
970 // 3) A pair interval whose low is not even.
986 // Find the register that is used last, and spill the interval
993 // The low interval has allocated the register for the high interval. In
994 // case the low interval had to split both intervals, we may end up in a
995 // situation where the high interval does not have a register use anymore.
997 // uses of the high interval's register, and put the high interval in the
1011 // For each active interval, find the next use of its register after the
1025 // For each inactive interval, find the next use of its register after the
1028 // Temp/Slow-path-safepoint interval has no holes.
1032 // Thanks to SSA, a non-split interval starting in a hole of an
1033 // inactive interval should never intersect with that inactive interval.
1091 // We split the first interval found, and put ourselves first in the
1103 // register, we split this interval just before its first register use.
1111 // Use this register and spill the active and inactive intervals that
1136 // Thanks to SSA, a non-split interval starting in a hole of an
1137 // inactive interval should never intersect with that inactive interval.
1151 // If it's inactive, it must start before the current interval.
1172 void RegisterAllocator::AddSorted(ArenaVector<LiveInterval*>* array, LiveInterval* interval) {
1173 DCHECK(!interval->IsFixed() && !interval->HasSpillSlot());
1178 if (current->StartsAfter(interval) && !current->IsHighInterval()) {
1181 } else if ((current->GetStart() == interval->GetStart()) && current->IsSlowPathSafepoint()) {
1182 // Ensure the slow path interval is the last to be processed at its location: we want the
1183 // interval to know all live registers at this location.
1190 // Insert the high interval before the low, to ensure the low is processed before.
1192 if (interval->HasHighInterval()) {
1193 array->insert(insert_pos, { interval->GetHighInterval(), interval });
1194 } else if (interval->HasLowInterval()) {
1195 array->insert(insert_pos, { interval, interval->GetLowInterval() });
1197 array->insert(insert_pos, interval);
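A sketch of the ordering AddSorted maintains, over a simplified interval type: the unhandled list is kept sorted by decreasing start position so the interval with the lowest start, the next one to process, sits at the back, and the high half of a register pair is inserted just before its low half so the low half is popped first. The struct and the pair handling below are simplified assumptions, not the ART code (the fixed-interval, safepoint, and high-interval special cases are omitted).

#include <cstddef>
#include <vector>

struct Interval {
  size_t start;
  Interval* high;  // Non-null on the low half of a register pair.
};

// Keeps `unhandled` sorted by decreasing start position: the element at the
// back has the lowest start and is the next interval to be popped.
static void AddSorted(std::vector<Interval*>* unhandled, Interval* interval) {
  size_t insert_at = 0;
  for (size_t i = unhandled->size(); i > 0; --i) {
    Interval* current = (*unhandled)[i - 1];
    if (current->start > interval->start) {
      insert_at = i;  // Insert right after the first later-starting interval.
      break;
    }
  }
  auto insert_pos = unhandled->begin() + static_cast<std::ptrdiff_t>(insert_at);
  if (interval->high != nullptr) {
    // Insert the high half before the low half so the low half is processed first.
    unhandled->insert(insert_pos, {interval->high, interval});
  } else {
    unhandled->insert(insert_pos, interval);
  }
}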
1201 LiveInterval* RegisterAllocator::SplitBetween(LiveInterval* interval, size_t from, size_t to) {
1209 return Split(interval, to);
1215 * split the interval at this position. Take the following example (block number is the linear
1224 * B2 needs to split an interval whose next use is in B4. If we were to split at the
1225 * beginning of B4, B3 would need to do a move between B3 and B4 to ensure the interval
1226 * is now in the correct location. It makes performance worse if the interval is spilled
1230 * interval to the same register as in B1, and therefore avoid doing any
1257 return Split(interval, block_to->GetLifetimeStart());
1260 LiveInterval* RegisterAllocator::Split(LiveInterval* interval, size_t position) {
1261 DCHECK_GE(position, interval->GetStart());
1262 DCHECK(!interval->IsDeadAt(position));
1263 if (position == interval->GetStart()) {
1264 // Spill slot will be allocated when handling `interval` again.
1265 interval->ClearRegister();
1266 if (interval->HasHighInterval()) {
1267 interval->GetHighInterval()->ClearRegister();
1268 } else if (interval->HasLowInterval()) {
1269 interval->GetLowInterval()->ClearRegister();
1271 return interval;
1273 LiveInterval* new_interval = interval->SplitAt(position);
1274 if (interval->HasHighInterval()) {
1275 LiveInterval* high = interval->GetHighInterval()->SplitAt(position);
1278 } else if (interval->HasLowInterval()) {
1279 LiveInterval* low = interval->GetLowInterval()->SplitAt(position);
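A sketch of the splitting contract the code above relies on: splitting at the interval's own start does not create a sibling, it just clears the register so the interval is handled again (and gets a spill slot then), while splitting strictly inside the interval produces a new sibling that owns the positions from `position` onward. The single-range Interval below is a simplification; the real SplitAt also distributes the remaining live ranges and use positions.

#include <cassert>
#include <cstddef>

// A single-range interval chained to its split siblings.
struct Interval {
  size_t start;
  size_t end;               // Live range [start, end).
  int reg;                  // -1 when no register is assigned.
  Interval* next_sibling;

  bool IsDeadAt(size_t p) const { return p >= end; }
  void ClearRegister() { reg = -1; }

  // `this` keeps [start, position); the returned sibling covers [position, end).
  Interval* SplitAt(size_t position) {
    Interval* sibling = new Interval{position, end, -1, next_sibling};
    end = position;
    next_sibling = sibling;
    return sibling;
  }
};

static Interval* Split(Interval* interval, size_t position) {
  assert(position >= interval->start);
  assert(!interval->IsDeadAt(position));
  if (position == interval->start) {
    // Nothing to split: drop the register so the interval is re-handled later.
    interval->ClearRegister();
    return interval;
  }
  return interval->SplitAt(position);
}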
1287 void RegisterAllocator::AllocateSpillSlotFor(LiveInterval* interval) {
1288 if (interval->IsHighInterval()) {
1289 // The low interval already took care of allocating the spill slot.
1290 DCHECK(!interval->GetLowInterval()->HasRegister());
1291 DCHECK(interval->GetLowInterval()->GetParent()->HasSpillSlot());
1295 LiveInterval* parent = interval->GetParent();
1298 // of this interval already has a spill slot, there is nothing to do.
1323 switch (interval->GetType()) {
1342 LOG(FATAL) << "Unexpected type for interval " << interval->GetType();
1354 size_t end = interval->GetLastSibling()->GetEnd();
1386 LiveInterval* interval = phi->GetLiveInterval();
1397 interval->SetSpillSlot(previous_phi->GetLiveInterval()->GetSpillSlot());
1402 interval->SetSpillSlot(catch_phi_spill_slots_);
1403 catch_phi_spill_slots_ += interval->NeedsTwoSpillSlots() ? 2 : 1;
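A sketch of the catch-phi path above: spill slots for catch phis come from a simple bump counter, with 64-bit values reserving two slots (and, as the earlier fragment shows, a phi that is equivalent to a previous one reuses that phi's slot instead). The PhiInterval struct and the free function below are illustrative names, not the ART API.

#include <cstddef>

// Only the properties that matter for catch-phi spill slots in this sketch.
struct PhiInterval {
  bool needs_two_slots;   // True for 64-bit (long/double) values.
  bool has_spill_slot;
  size_t spill_slot;
  void SetSpillSlot(size_t slot) { spill_slot = slot; has_spill_slot = true; }
};

// Bump allocation mirroring `catch_phi_spill_slots_` above: each catch phi
// reserves one slot, or two when its value is 64 bits wide.
static size_t catch_phi_spill_slots = 0;

static void AllocateSpillSlotForCatchPhi(PhiInterval* interval) {
  interval->SetSpillSlot(catch_phi_spill_slots);
  catch_phi_spill_slots += interval->needs_two_slots ? 2 : 1;
}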
1538 // a split interval between two blocks: the move has to happen in the successor.
1600 void RegisterAllocator::ConnectSiblings(LiveInterval* interval) {
1601 LiveInterval* current = interval;
1605 && !interval->GetDefinedBy()->IsCurrentMethod()) {
1607 InsertMoveAfter(interval->GetDefinedBy(),
1608 interval->ToLocation(),
1609 interval->NeedsTwoSpillSlots()
1610 ? Location::DoubleStackSlot(interval->GetParent()->GetSpillSlot())
1611 : Location::StackSlot(interval->GetParent()->GetSpillSlot()));
1621 // Walk over all uses covered by this interval, and update the location
1642 AddInputMoveFor(interval->GetDefinedBy(), use->GetUser(), source, expected_location);
1668 // If the next interval starts just after this one, and has a register,
1675 InsertParallelMoveAt(current->GetEnd(), interval->GetDefinedBy(), source, destination);
1685 DCHECK(interval->GetDefinedBy()->IsActualObject())
1686 << interval->GetDefinedBy()->DebugName()
1700 DCHECK(interval->GetDefinedBy()->IsActualObject())
1701 << interval->GetDefinedBy()->DebugName()
1747 void RegisterAllocator::ConnectSplitSiblings(LiveInterval* interval,
1750 if (interval->GetNextSibling() == nullptr) {
1758 LiveInterval* destination = interval->GetSiblingAt(destination_position);
1759 LiveInterval* source = interval->GetSiblingAt(source_position);
1766 LiveInterval* parent = interval->GetParent();
1772 // live interval computation however does not compute a fixed point, and
1788 // `GetSiblingAt` returns the interval whose start and end cover `position`,
1789 // but does not check whether the interval is inactive at that position.
1790 // The only situation where the interval is inactive at that position is in the
1894 LOG(FATAL) << "Unexpected type for interval " << current->GetType();
1931 LiveInterval* interval = liveness_.GetInstructionFromSsaIndex(idx)->GetLiveInterval();
1932 LiveInterval* sibling = interval->GetSiblingAt(block->GetLifetimeStart());
1934 // a lifetime hole in it. `CoversSlow` returns whether the interval is live at that
1944 LiveInterval* interval = liveness_.GetInstructionFromSsaIndex(idx)->GetLiveInterval();
1946 ConnectSplitSiblings(interval, predecessor, block);
1976 // High intervals can be skipped, they are already handled by the low interval.
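Finally, a hedged sketch of how split siblings get connected across a control-flow edge, which is what the ConnectSplitSiblings fragments above do: find the sibling live at the end of the predecessor and the sibling live at the start of the successor, and if their locations differ, insert a move, at the exit of the predecessor when it has a single successor, otherwise at the entry of the successor. Location, Block, Sibling, and the two insertion stubs are assumptions of the sketch, not the allocator's types.

#include <cstddef>

struct Location { int value; };
static bool Equals(Location a, Location b) { return a.value == b.value; }

struct Block { size_t num_successors; };
struct Sibling { Location location; };  // A split piece of one live interval.

// Hypothetical stand-ins for the parallel-move insertion helpers.
static void InsertMoveAtExitOf(Block* /*block*/, Location /*from*/, Location /*to*/) {}
static void InsertMoveAtEntryOf(Block* /*block*/, Location /*from*/, Location /*to*/) {}

// Resolves one split interval across the edge `from` -> `to`.
static void ConnectSplitSiblings(Sibling* source, Sibling* destination, Block* from, Block* to) {
  if (source == nullptr || destination == nullptr ||
      Equals(source->location, destination->location)) {
    return;  // Nothing to connect on this edge.
  }
  if (from->num_successors == 1) {
    // Safe to fix things up at the end of the predecessor.
    InsertMoveAtExitOf(from, source->location, destination->location);
  } else {
    // The predecessor branches elsewhere too, so the move belongs at the
    // entry of the successor (critical edges are assumed already split).
    InsertMoveAtEntryOf(to, source->location, destination->location);
  }
}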