Bug 1071403 - Allow evicting multiple intervals at once in the backtracking allocator when considering aliased registers, r=sunfish.

Brian Hackett 2015-02-18 15:18:06 -07:00
parent d2a24551f7
commit 01d7c211a0
2 changed files with 84 additions and 29 deletions
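Before the diffs themselves, a brief sketch of the idea: the patch replaces the single `LiveInterval **pconflicting` out-parameter with a `LiveIntervalVector &conflicting` set, collects every live interval that conflicts through an aliased register, and evicts the whole set only when its maximum spill weight is below the spill weight of the interval being allocated. The standalone example below is a minimal model of that policy, not the SpiderMonkey code; the types and function names (`Interval`, `tryEvictConflicts`) are hypothetical stand-ins, with `maximumSpillWeight` mirroring the helper the patch adds.

// Minimal standalone sketch (not the SpiderMonkey code) of the policy this
// patch introduces: instead of tracking a single conflicting interval,
// collect the whole set of intervals conflicting on aliased registers and
// evict them together only if the set's *maximum* spill weight is below the
// weight of the interval being allocated. All names here are simplified
// stand-ins for illustration.
#include <algorithm>
#include <cstdio>
#include <vector>

struct Interval {
    int id;
    size_t spillWeight;
};

// Analogue of the patch's BacktrackingAllocator::maximumSpillWeight(): the
// eviction decision is gated on the heaviest member of the conflicting set.
static size_t maximumSpillWeight(const std::vector<Interval *> &intervals)
{
    size_t maxWeight = 0;
    for (Interval *interval : intervals)
        maxWeight = std::max(maxWeight, interval->spillWeight);
    return maxWeight;
}

// Evict every conflicting interval only if doing so is cheaper than
// splitting or spilling the interval we are trying to allocate.
static bool tryEvictConflicts(const Interval &toAllocate,
                              const std::vector<Interval *> &conflicting)
{
    if (conflicting.empty() ||
        maximumSpillWeight(conflicting) >= toAllocate.spillWeight)
    {
        return false;  // caller falls back to splitting or spilling
    }
    for (Interval *conflict : conflicting)
        std::printf("evicting interval %d (weight %zu)\n",
                    conflict->id, conflict->spillWeight);
    return true;
}

int main()
{
    Interval a{1, 10}, b{2, 20};
    Interval current{3, 50};
    std::vector<Interval *> conflicts{&a, &b};
    if (!tryEvictConflicts(current, conflicts))
        std::printf("would split or spill interval %d instead\n", current.id);
    return 0;
}

Gating on the maximum weight of the set keeps the same character as the old single-conflict heuristic: eviction is attempted only when every member of the conflicting set is individually cheaper to redo than the interval currently being allocated.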

View File

@@ -471,7 +471,7 @@ static const size_t MAX_ATTEMPTS = 2;
 
 bool
 BacktrackingAllocator::tryAllocateFixed(LiveInterval *interval, bool *success,
-                                        bool *pfixed, LiveInterval **pconflicting)
+                                        bool *pfixed, LiveIntervalVector &conflicting)
 {
     // Spill intervals which are required to be in a certain stack slot.
     if (!interval->requirement()->allocation().isRegister()) {
@@ -482,12 +482,12 @@ BacktrackingAllocator::tryAllocateFixed(LiveInterval *interval, bool *success,
     }
 
     AnyRegister reg = interval->requirement()->allocation().toRegister();
-    return tryAllocateRegister(registers[reg.code()], interval, success, pfixed, pconflicting);
+    return tryAllocateRegister(registers[reg.code()], interval, success, pfixed, conflicting);
 }
 
 bool
 BacktrackingAllocator::tryAllocateNonFixed(LiveInterval *interval, bool *success,
-                                           bool *pfixed, LiveInterval **pconflicting)
+                                           bool *pfixed, LiveIntervalVector &conflicting)
 {
     // If we want, but do not require an interval to be in a specific
     // register, only look at that register for allocating and evict
@@ -496,7 +496,7 @@ BacktrackingAllocator::tryAllocateNonFixed(LiveInterval *interval, bool *success
     // and will tie up more registers than if we spilled.
     if (interval->hint()->kind() == Requirement::FIXED) {
         AnyRegister reg = interval->hint()->allocation().toRegister();
-        if (!tryAllocateRegister(registers[reg.code()], interval, success, pfixed, pconflicting))
+        if (!tryAllocateRegister(registers[reg.code()], interval, success, pfixed, conflicting))
             return false;
         if (*success)
             return true;
@@ -511,11 +511,11 @@ BacktrackingAllocator::tryAllocateNonFixed(LiveInterval *interval, bool *success
         return true;
     }
 
-    if (!*pconflicting || minimalInterval(interval)) {
+    if (conflicting.empty() || minimalInterval(interval)) {
         // Search for any available register which the interval can be
         // allocated to.
         for (size_t i = 0; i < AnyRegister::Total; i++) {
-            if (!tryAllocateRegister(registers[i], interval, success, pfixed, pconflicting))
+            if (!tryAllocateRegister(registers[i], interval, success, pfixed, conflicting))
                 return false;
             if (*success)
                 return true;
@@ -568,19 +568,19 @@ BacktrackingAllocator::processInterval(LiveInterval *interval)
     bool canAllocate = setIntervalRequirement(interval);
 
     bool fixed;
-    LiveInterval *conflict = nullptr;
+    LiveIntervalVector conflicting;
 
     for (size_t attempt = 0;; attempt++) {
         if (canAllocate) {
             bool success = false;
             fixed = false;
-            conflict = nullptr;
+            conflicting.clear();
 
             // Ok, let's try allocating for this interval.
             if (interval->requirement()->kind() == Requirement::FIXED) {
-                if (!tryAllocateFixed(interval, &success, &fixed, &conflict))
+                if (!tryAllocateFixed(interval, &success, &fixed, conflicting))
                     return false;
             } else {
-                if (!tryAllocateNonFixed(interval, &success, &fixed, &conflict))
+                if (!tryAllocateNonFixed(interval, &success, &fixed, conflicting))
                     return false;
             }
@@ -588,15 +588,17 @@ BacktrackingAllocator::processInterval(LiveInterval *interval)
             if (success)
                 return true;
 
-            // If that didn't work, but we have a non-fixed LiveInterval known
-            // to be conflicting, maybe we can evict it and try again.
+            // If that didn't work, but we have one or more non-fixed intervals
+            // known to be conflicting, maybe we can evict them and try again.
             if (attempt < MAX_ATTEMPTS &&
                 !fixed &&
-                conflict &&
-                computeSpillWeight(conflict) < computeSpillWeight(interval))
+                !conflicting.empty() &&
+                maximumSpillWeight(conflicting) < computeSpillWeight(interval))
             {
-                if (!evictInterval(conflict))
-                    return false;
+                for (size_t i = 0; i < conflicting.length(); i++) {
+                    if (!evictInterval(conflicting[i]))
+                        return false;
+                }
                 continue;
             }
         }
@@ -607,6 +609,7 @@ BacktrackingAllocator::processInterval(LiveInterval *interval)
         // be constructed so that any minimal interval is allocatable.
         MOZ_ASSERT(!minimalInterval(interval));
 
+        LiveInterval *conflict = conflicting.empty() ? nullptr : conflicting[0];
         return chooseIntervalSplit(interval, canAllocate && fixed, conflict);
     }
 }
@@ -782,7 +785,7 @@ BacktrackingAllocator::tryAllocateGroupRegister(PhysicalRegister &r, VirtualRegi
 
 bool
 BacktrackingAllocator::tryAllocateRegister(PhysicalRegister &r, LiveInterval *interval,
-                                           bool *success, bool *pfixed, LiveInterval **pconflicting)
+                                           bool *success, bool *pfixed, LiveIntervalVector &conflicting)
 {
     *success = false;
@@ -796,6 +799,8 @@ BacktrackingAllocator::tryAllocateRegister(PhysicalRegister &r, LiveInterval *in
     MOZ_ASSERT_IF(interval->requirement()->kind() == Requirement::FIXED,
                   interval->requirement()->allocation() == LAllocation(r.reg));
 
+    LiveIntervalVector aliasedConflicting;
+
     for (size_t i = 0; i < interval->numRanges(); i++) {
         AllocatedRange range(interval, interval->getRange(i)), existing;
         for (size_t a = 0; a < r.reg.numAliased(); a++) {
@@ -803,26 +808,63 @@ BacktrackingAllocator::tryAllocateRegister(PhysicalRegister &r, LiveInterval *in
             if (!rAlias.allocations.contains(range, &existing))
                 continue;
             if (existing.interval->hasVreg()) {
-                if (JitSpewEnabled(JitSpew_RegAlloc)) {
-                    JitSpew(JitSpew_RegAlloc, " %s collides with v%u[%u] %s [weight %lu]",
-                            rAlias.reg.name(), existing.interval->vreg(),
-                            existing.interval->index(),
-                            existing.range->toString(),
-                            computeSpillWeight(existing.interval));
-                }
-                if (!*pconflicting || computeSpillWeight(existing.interval) < computeSpillWeight(*pconflicting))
-                    *pconflicting = existing.interval;
+                MOZ_ASSERT(existing.interval->getAllocation()->toRegister() == rAlias.reg);
+                bool duplicate = false;
+                for (size_t i = 0; i < aliasedConflicting.length(); i++) {
+                    if (aliasedConflicting[i] == existing.interval) {
+                        duplicate = true;
+                        break;
+                    }
+                }
+                if (!duplicate && !aliasedConflicting.append(existing.interval))
+                    return false;
             } else {
                 if (JitSpewEnabled(JitSpew_RegAlloc)) {
                     JitSpew(JitSpew_RegAlloc, " %s collides with fixed use %s",
                             rAlias.reg.name(), existing.range->toString());
                 }
                 *pfixed = true;
+                return true;
             }
-            return true;
         }
     }
 
+    if (!aliasedConflicting.empty()) {
+        // One or more aliased registers is allocated to another live interval
+        // overlapping this one. Keep track of the conflicting set, and in the
+        // case of multiple conflicting sets keep track of the set with the
+        // lowest maximum spill weight.
+        if (JitSpewEnabled(JitSpew_RegAlloc)) {
+            if (aliasedConflicting.length() == 1) {
+                LiveInterval *existing = aliasedConflicting[0];
+                JitSpew(JitSpew_RegAlloc, " %s collides with v%u[%u] %s [weight %lu]",
+                        r.reg.name(), existing->vreg(), existing->index(),
+                        existing->rangesToString(), computeSpillWeight(existing));
+            } else {
+                JitSpew(JitSpew_RegAlloc, " %s collides with the following", r.reg.name());
+                for (size_t i = 0; i < aliasedConflicting.length(); i++) {
+                    LiveInterval *existing = aliasedConflicting[i];
+                    JitSpew(JitSpew_RegAlloc, " v%u[%u] %s [weight %lu]",
+                            existing->vreg(), existing->index(),
+                            existing->rangesToString(), computeSpillWeight(existing));
+                }
+            }
+        }
+
+        if (conflicting.empty()) {
+            if (!conflicting.appendAll(aliasedConflicting))
+                return false;
+        } else {
+            if (maximumSpillWeight(aliasedConflicting) < maximumSpillWeight(conflicting)) {
+                conflicting.clear();
+                if (!conflicting.appendAll(aliasedConflicting))
+                    return false;
+            }
+        }
+        return true;
+    }
+
     JitSpew(JitSpew_RegAlloc, " allocated to %s", r.reg.name());
 
     for (size_t i = 0; i < interval->numRanges(); i++) {
@@ -1792,6 +1834,15 @@ BacktrackingAllocator::computeSpillWeight(const VirtualRegisterGroup *group)
     return maxWeight;
 }
 
+size_t
+BacktrackingAllocator::maximumSpillWeight(const LiveIntervalVector &intervals)
+{
+    size_t maxWeight = 0;
+    for (size_t i = 0; i < intervals.length(); i++)
+        maxWeight = Max(maxWeight, computeSpillWeight(intervals[i]));
+    return maxWeight;
+}
+
 bool
 BacktrackingAllocator::trySplitAcrossHotcode(LiveInterval *interval, bool *success)
 {

View File

@@ -211,13 +211,15 @@ class BacktrackingAllocator
     bool tryGroupRegisters(uint32_t vreg0, uint32_t vreg1);
     bool tryGroupReusedRegister(uint32_t def, uint32_t use);
    bool groupAndQueueRegisters();
-    bool tryAllocateFixed(LiveInterval *interval, bool *success, bool *pfixed, LiveInterval **pconflicting);
-    bool tryAllocateNonFixed(LiveInterval *interval, bool *success, bool *pfixed, LiveInterval **pconflicting);
+    bool tryAllocateFixed(LiveInterval *interval, bool *success, bool *pfixed,
+                          LiveIntervalVector &conflicting);
+    bool tryAllocateNonFixed(LiveInterval *interval, bool *success, bool *pfixed,
+                             LiveIntervalVector &conflicting);
     bool processInterval(LiveInterval *interval);
     bool processGroup(VirtualRegisterGroup *group);
     bool setIntervalRequirement(LiveInterval *interval);
     bool tryAllocateRegister(PhysicalRegister &r, LiveInterval *interval,
-                             bool *success, bool *pfixed, LiveInterval **pconflicting);
+                             bool *success, bool *pfixed, LiveIntervalVector &conflicting);
     bool tryAllocateGroupRegister(PhysicalRegister &r, VirtualRegisterGroup *group,
                                   bool *psuccess, bool *pfixed, LiveInterval **pconflicting);
     bool evictInterval(LiveInterval *interval);
@@ -261,6 +263,8 @@ class BacktrackingAllocator
     size_t computePriority(const VirtualRegisterGroup *group);
     size_t computeSpillWeight(const VirtualRegisterGroup *group);
 
+    size_t maximumSpillWeight(const LiveIntervalVector &intervals);
+
     bool chooseIntervalSplit(LiveInterval *interval, bool fixed, LiveInterval *conflict);
     bool splitAt(LiveInterval *interval,