Commit

Signed-off-by: Alyssa Rosenzweig <[email protected]>
alyssarosenzweig committed May 8, 2024
1 parent 12add31 commit 0622a02
Showing 1 changed file with 27 additions and 31 deletions.
58 changes: 27 additions & 31 deletions FEXCore/Source/Interface/IR/Passes/RegisterAllocationPass.cpp
@@ -236,15 +236,27 @@ bool ConstrainedRAPass::Run(IREmitter* IREmit) {
auto FreeReg = [this](auto Reg) {
bool Pair = Reg.Class == GPRPairClass;
auto ClassType = Pair ? GPRClass : Reg.Class;
auto RegBits = (Pair ? 0x3 : 0x1) << Reg.Reg;
auto RegBits = (Pair ? 0b11 : 0b1) << Reg.Reg;

auto Class = &Classes[ClassType];
LOGMAN_THROW_AA_FMT(!(Class->Available & RegBits), "Register double-free");
Class->Available |= RegBits;
};

auto SpillReg = [&IR, &IREmit, &SpillSlotCount, &SpillSlots, &SSAToReg, &FreeReg, &Map, &Unmap, &IsOld,
&NextUses](auto Class, IROp_Header* Exclude, bool Pair) {
auto HasSource = [&IR, &IsOld, &Unmap](auto Header, auto Old) {
LOGMAN_THROW_AA_FMT(IsOld(Old), "Invariant");

foreach_arg(Header, _, Arg) {
if (Unmap(IR.GetNode(Arg)) == Old) {
return true;
}
}

return false;
};

auto SpillReg = [&IR, &IREmit, &SpillSlotCount, &SpillSlots, &SSAToReg, &FreeReg, &Map, &IsOld, &NextUses,
&HasSource](auto Class, IROp_Header* Exclude, bool Pair) {
// First, find the best node to spill. We use the well-known
// "furthest-first" heuristic, spilling the node whose next-use is the
// farthest in the future.
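
For illustration, a minimal standalone sketch of the furthest-first choice, assuming a plain array of forward next-use distances rather than the pass's NextUses/SSAToReg bookkeeping; all names below are hypothetical. The pass itself appears to measure distances relative to the end of the block (see the comment near the end of this diff), so its comparison runs in the other direction.

#include <cstdint>
#include <vector>

// Illustrative only: pick the register whose resident value is needed the
// latest, i.e. has the largest next-use distance in forward program order.
unsigned PickFurthestFirst(const std::vector<uint32_t>& NextUseOfReg) {
  unsigned Best = 0;
  uint32_t BestDistance = 0;
  for (unsigned i = 0; i < NextUseOfReg.size(); ++i) {
    if (NextUseOfReg[i] >= BestDistance) {
      BestDistance = NextUseOfReg[i];
      Best = i;
    }
  }
  return Best;
}
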
@@ -268,17 +280,9 @@ bool ConstrainedRAPass::Run(IREmitter* IREmit) {
OrderedNode* Old = Class->RegToSSA[i];

LOGMAN_THROW_AA_FMT(Old != nullptr, "Invariant");
LOGMAN_THROW_AA_FMT(IsOld(Old), "Invariant");
LOGMAN_THROW_AA_FMT(SSAToReg.at(IR.GetID(Map(Old)).Value).Reg == i, "Invariant'");

bool Excluded = false;
foreach_arg(Exclude, _, Arg) {
if (Unmap(IR.GetNode(Arg)) == Old) {
Excluded = true;
}
}

if (!Excluded) {
if (!HasSource(Exclude, Old)) {
uint32_t NextUse = NextUses.at(IR.GetID(Old).Value);
if (NextUse < BestDistance) {
BestDistance = NextUse;
@@ -287,7 +291,6 @@ bool ConstrainedRAPass::Run(IREmitter* IREmit) {
}
}


// TODO: Cleaner solution?
auto Header = IR.GetOp<IROp_Header>(Old);
if (GetRegClassFromNode(&IR, Header) == GPRPairClass) {
@@ -345,13 +348,12 @@ bool ConstrainedRAPass::Run(IREmitter* IREmit) {
auto AvailableMask = [](auto Class, bool Pair) {
uint32_t Available = Class->Available;

// Limit Available to only valid base registers for pairs
if (Pair) {
// Only choose register R if R and R + 1 are both free.
// Only choose base register R if R and R + 1 are both free
Available &= (Available >> 1);

// Only consider aligned registers
Available &= EVEN_BITS & PAIR_BITS;
// Only consider aligned registers in the pair region
Available &= (EVEN_BITS & PAIR_BITS);
}

return Available;
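
The pair masking above can be exercised on its own; in this sketch EVEN_BITS and PAIR_BITS are stand-in constants, not FEX's actual definitions.

#include <cassert>
#include <cstdint>

constexpr uint32_t EVEN_BITS = 0x55555555u; // bits 0, 2, 4, ... (assumed value)
constexpr uint32_t PAIR_BITS = 0x0000ffffu; // hypothetical pair-capable region

// Bit R survives only if R and R + 1 are both free, R is even, and R lies in
// the pair region, i.e. R is a legal base for an aligned register pair.
uint32_t PairBaseMask(uint32_t Available) {
  Available &= (Available >> 1);
  Available &= (EVEN_BITS & PAIR_BITS);
  return Available;
}

int main() {
  assert(PairBaseMask(0b1100) == 0b0100); // regs 2 and 3 free: base 2 works
  assert(PairBaseMask(0b11000) == 0);     // regs 3 and 4 free: not aligned
  return 0;
}
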
@@ -384,10 +386,7 @@ bool ConstrainedRAPass::Run(IREmitter* IREmit) {
//
// When spilling for pairs, SpillReg prioritizes spilling the pair region
// which ensures this loop is well-behaved in that case too.
//
// TODO: Maybe specialize this function for pairs vs not-pairs?
while (std::popcount(Class->Available) < (Pair ? 2 : 1) || (Pair && !(Class->Available & PAIR_BITS))) {

IREmit->SetWriteCursorBefore(CodeNode);
SpillReg(Class, Pivot, Pair);
}
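
Written out as a standalone predicate, the loop guard reads as below; PAIR_BITS is again a stand-in constant rather than FEX's real definition.

#include <bit>
#include <cstdint>

constexpr uint32_t PAIR_BITS = 0x0000ffffu; // hypothetical pair-capable region

// Keep spilling while the request cannot be satisfied: a scalar needs one free
// register; a pair needs two free registers, at least one of them pair-capable.
bool NeedsMoreSpills(uint32_t Available, bool Pair) {
  return std::popcount(Available) < (Pair ? 2 : 1) || (Pair && !(Available & PAIR_BITS));
}
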
@@ -400,26 +399,24 @@ bool ConstrainedRAPass::Run(IREmitter* IREmit) {
LOGMAN_THROW_AA_FMT(OrigClassType == GPRPairClass, "Already spilled");

// First, find a free scalar. There must be at least 2.
Available = Class->Available;
unsigned Hole = std::countr_zero(Available);
unsigned Hole = std::countr_zero(Class->Available);
LOGMAN_THROW_AA_FMT(Class->Available & (1 << Hole), "Definition");

// Its neighbour is blocking the pair.
unsigned Blocked = Hole ^ 1;
LOGMAN_THROW_AA_FMT(!(Class->Available & (1 << Blocked)), "Invariant");
LOGMAN_THROW_AA_FMT((1 << Hole) & PAIR_BITS, "Only spilled registers available for pairs");
LOGMAN_THROW_AA_FMT((1 << Hole) & PAIR_BITS, "Pairable register");

// Find another free scalar to evict the neighbour
uint32_t AvailableAfter = Available & ~(1 << Hole);
uint32_t AvailableAfter = Class->Available & ~(1 << Hole);
unsigned NewReg = std::countr_zero(AvailableAfter);
LOGMAN_THROW_AA_FMT(Class->Available & (1 << NewReg), "Ensured space");

// Now just evict.
IREmit->SetWriteCursorBefore(CodeNode);
auto Old = Class->RegToSSA[Blocked];
IREmit->SetWriteCursorBefore(CodeNode);

LOGMAN_THROW_AA_FMT(GetRegClassFromNode(&IR, IR.GetOp<IROp_Header>(Old)) == GPRClass, "Must be a scalar due to alignment and free "
"neighbour");
LOGMAN_THROW_AA_FMT(GetRegClassFromNode(&IR, IR.GetOp<IROp_Header>(Old)) == GPRClass, "Only scalars have free neighbours");
auto Copy = IREmit->_Copy(Map(Old));

Remap(Old, Copy);
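
A hypothetical sketch of just the register-selection part of this shuffle, working on a bare availability mask; the real pass also rewrites the IR with the Copy above and updates RegToSSA.

#include <bit>
#include <cstdint>
#include <utility>

// Given at least two free scalars but no free aligned pair: Hole is a free
// register, Hole ^ 1 is the occupied neighbour blocking the pair, and NewReg
// is a second free register that can receive the neighbour's value. Returns
// {Blocked, NewReg}; the caller would then emit the copy and free the pair.
std::pair<unsigned, unsigned> PlanPairShuffle(uint32_t Available) {
  unsigned Hole = std::countr_zero(Available);
  unsigned Blocked = Hole ^ 1;
  uint32_t AvailableAfter = Available & ~(1u << Hole);
  unsigned NewReg = std::countr_zero(AvailableAfter);
  return {Blocked, NewReg};
}
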
@@ -431,8 +428,7 @@ bool ConstrainedRAPass::Run(IREmitter* IREmit) {

LOGMAN_THROW_AA_FMT(Available != 0, "Post-condition of spill and shuffle");

// Assign a free register in the appropriate class
// Now that we have split live ranges, this must succeed.
// Assign a free register in the appropriate class.
unsigned Reg = std::countr_zero(Available);
SetReg(CodeNode, Class, PhysicalRegister(OrigClassType, Reg));
};
@@ -443,8 +439,8 @@ bool ConstrainedRAPass::Run(IREmitter* IREmit) {
Class.Available = (1 << Class.Count) - 1;
}

// Stream of sources in the block, backwards. (First element is the last
// source in the block.)
// Backwards stream of sources in the block. First element is the last
// source in the block.
//
// Contains the next-use distance (relative to the end of the block) of the
// source following this instruction.
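
A minimal sketch of how such a backwards next-use stream could be built in one reverse walk, with plain integers standing in for FEX's IR values; the sentinel and container choices are assumptions.

#include <cstdint>
#include <limits>
#include <unordered_map>
#include <vector>

constexpr uint32_t NO_LATER_USE = std::numeric_limits<uint32_t>::max();

// Walk the block's sources from last to first. For each occurrence, record the
// distance (from the end of the block) of that value's next use after this
// point, then note this use as the new closest one for anything earlier.
std::vector<uint32_t> BuildNextUseStream(const std::vector<uint32_t>& Sources) {
  std::unordered_map<uint32_t, uint32_t> Closest; // value -> distance of its next use
  std::vector<uint32_t> Stream;                   // first element is the last source
  uint32_t Distance = 0;

  for (auto It = Sources.rbegin(); It != Sources.rend(); ++It, ++Distance) {
    auto Found = Closest.find(*It);
    Stream.push_back(Found == Closest.end() ? NO_LATER_USE : Found->second);
    Closest[*It] = Distance;
  }
  return Stream;
}
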