Searched refs:intervals (Results 1 - 6 of 6) sorted by relevance

/art/runtime/gc/collector/
H A Dimmune_spaces.cc44 std::vector<Interval> intervals;
58 intervals.push_back(Interval(reinterpret_cast<uintptr_t>(image_oat_file->Begin()),
63 intervals.push_back(Interval(space_begin, space_end, /*is_heap*/true));
65 std::sort(intervals.begin(), intervals.end());
71 for (const Interval& interval : intervals) {
80 // interval. Otherwise continue since we never start a new region with non image intervals.
/art/compiler/optimizing/
H A Dregister_allocator_test.cc39 // intervals and registers get allocated to them.
62 bool ValidateIntervals(const ScopedArenaVector<LiveInterval*>& intervals, argument
64 return RegisterAllocator::ValidateIntervals(ArrayRef<LiveInterval* const>(intervals),
104 ScopedArenaVector<LiveInterval*> intervals(GetScopedAllocator()->Adapter());
106 // Test with two intervals of the same range.
109 intervals.push_back(BuildInterval(ranges, arraysize(ranges), GetScopedAllocator(), 0));
110 intervals.push_back(BuildInterval(ranges, arraysize(ranges), GetScopedAllocator(), 1));
111 ASSERT_TRUE(ValidateIntervals(intervals, codegen));
113 intervals[1]->SetRegister(0);
114 ASSERT_FALSE(ValidateIntervals(intervals, codegen
[all...]
H A Dregister_allocator.h56 // allocates registers to live intervals.
60 // intervals that intersect each other. Returns false if it failed.
66 // Verifies that live intervals do not conflict. Used by unit testing.
67 static bool ValidateIntervals(ArrayRef<LiveInterval* const> intervals,
H A Dregister_allocator.cc109 bool RegisterAllocator::ValidateIntervals(ArrayRef<LiveInterval* const> intervals, argument
124 for (LiveInterval* start_interval : intervals) {
138 for (LiveInterval* start_interval : intervals) {
188 for (LiveInterval* interval : intervals) {
H A Dregister_allocator_linear_scan.cc180 // intervals belonging to the live-in set of the catch/header block to be spilled.
230 // Create synthesized intervals for temporaries.
380 // If needed, add interval to the list of unhandled intervals.
387 // of this new interval might be after intervals already in the list.
393 // Don't add directly to `unhandled`, temp or safepoint intervals
428 // To simplify unit testing, we eagerly create the array of intervals, and
431 ScopedArenaVector<LiveInterval*> intervals(
436 intervals.push_back(instruction->GetLiveInterval());
445 intervals.push_back(fixed);
451 intervals
823 RemoveIntervalAndPotentialOtherHalf( ScopedArenaVector<LiveInterval*>* intervals, ScopedArenaVector<LiveInterval*>::iterator pos) argument
[all...]
H A Dregister_allocator_graph_color.cc33 // progress guarantees. Forward progress for the algorithm means splitting live intervals on
35 // to color. The main threat to forward progress is trying to split short intervals which cannot be
37 // change. This is avoided by prioritizing short intervals before long ones, so that long
38 // intervals are split when coloring fails.
171 // Tiny intervals should have maximum priority, since they cannot be split any further.
195 // short intervals; we do not benefit much if we split them further.
201 // a set of adjacent nodes corresponding to intervals overlapping with its own. To save memory,
397 // This is essentially based on use density and location; short intervals with many uses inside
409 // we prioritize intervals that require registers, and after that we prioritize
410 // short intervals
630 ScopedArenaVector<LiveInterval*>& intervals = processing_core_regs local
851 ScopedArenaVector<LiveInterval*>& intervals = IsCoreInterval(interval) local
1183 BuildInterferenceGraph( const ScopedArenaVector<LiveInterval*>& intervals, const ScopedArenaVector<InterferenceNode*>& physical_nodes) argument
1756 BuildConflictMask(const Container& intervals) argument
1991 ColorSpillSlots(ArrayRef<LiveInterval* const> intervals, size_t* num_stack_slots_used) argument
[all...]

Completed in 86 milliseconds