diff --git a/jlm/hls/backend/rvsdg2rhls/ThetaConversion.cpp b/jlm/hls/backend/rvsdg2rhls/ThetaConversion.cpp index 6ef9fc707..57e65889a 100644 --- a/jlm/hls/backend/rvsdg2rhls/ThetaConversion.cpp +++ b/jlm/hls/backend/rvsdg2rhls/ThetaConversion.cpp @@ -22,26 +22,27 @@ ConvertThetaNode(rvsdg::ThetaNode & theta) // smap. for (size_t i = 0; i < theta.ninputs(); i++) { + auto loopvar = theta.MapInputLoopVar(*theta.input(i)); // Check if the input is a loop invariant such that a loop constant buffer should be created. // Memory state inputs are not loop variables containing a value, so we ignore these. - if (is_invariant(theta.input(i)) && !jlm::rvsdg::is(theta.input(i)->Type())) + if (ThetaLoopVarIsInvariant(loopvar) && !jlm::rvsdg::is(loopvar.input->Type())) { - smap.insert(theta.input(i)->argument(), loop->add_loopconst(theta.input(i)->origin())); + smap.insert(loopvar.pre, loop->add_loopconst(loopvar.input->origin())); branches.push_back(nullptr); // The HLS loop has no output for this input. The users of the theta output are // therefore redirected to the input origin, as the value is loop invariant. 
- theta.output(i)->divert_users(theta.input(i)->origin()); + loopvar.output->divert_users(loopvar.input->origin()); } else { jlm::rvsdg::output * buffer; - loop->add_loopvar(theta.input(i)->origin(), &buffer); - smap.insert(theta.input(i)->argument(), buffer); + loop->AddLoopVar(loopvar.input->origin(), &buffer); + smap.insert(loopvar.pre, buffer); // buffer out is only used by branch branches.push_back(*buffer->begin()); // divert theta outputs - theta.output(i)->divert_users(loop->output(loop->noutputs() - 1)); + loopvar.output->divert_users(loop->output(loop->noutputs() - 1)); } } @@ -54,7 +55,7 @@ ConvertThetaNode(rvsdg::ThetaNode & theta) { if (branches[i]) { - branches[i]->divert_to(smap.lookup(theta.input(i)->result()->origin())); + branches[i]->divert_to(smap.lookup(theta.MapInputLoopVar(*theta.input(i)).post->origin())); } } diff --git a/jlm/hls/backend/rvsdg2rhls/add-prints.cpp b/jlm/hls/backend/rvsdg2rhls/add-prints.cpp index d4b8495fe..af3533d1c 100644 --- a/jlm/hls/backend/rvsdg2rhls/add-prints.cpp +++ b/jlm/hls/backend/rvsdg2rhls/add-prints.cpp @@ -84,7 +84,7 @@ route_to_region(jlm::rvsdg::output * output, rvsdg::Region * region) } else if (auto theta = dynamic_cast(region->node())) { - output = theta->add_loopvar(output)->argument(); + output = theta->AddLoopVar(output).pre; } else if (auto lambda = dynamic_cast(region->node())) { diff --git a/jlm/hls/backend/rvsdg2rhls/add-triggers.cpp b/jlm/hls/backend/rvsdg2rhls/add-triggers.cpp index b19624b40..a016c1b06 100644 --- a/jlm/hls/backend/rvsdg2rhls/add-triggers.cpp +++ b/jlm/hls/backend/rvsdg2rhls/add-triggers.cpp @@ -106,7 +106,7 @@ add_triggers(rvsdg::Region * region) { JLM_ASSERT(trigger != nullptr); JLM_ASSERT(get_trigger(t->subregion()) == nullptr); - t->add_loopvar(trigger); + t->AddLoopVar(trigger); add_triggers(t->subregion()); } else if (auto gn = dynamic_cast(node)) diff --git a/jlm/hls/backend/rvsdg2rhls/distribute-constants.cpp b/jlm/hls/backend/rvsdg2rhls/distribute-constants.cpp index 
6c4b64b05..7fd439520 100644 --- a/jlm/hls/backend/rvsdg2rhls/distribute-constants.cpp +++ b/jlm/hls/backend/rvsdg2rhls/distribute-constants.cpp @@ -24,29 +24,27 @@ distribute_constant(const rvsdg::SimpleOperation & op, rvsdg::simple_output * ou changed = false; for (auto user : *out) { - auto node = rvsdg::input::GetNode(*user); - if (auto ti = dynamic_cast(user)) + if (auto theta = rvsdg::TryGetOwnerNode(*user)) { - auto arg = ti->argument(); - auto res = ti->result(); - if (res->origin() == arg) + auto loopvar = theta->MapInputLoopVar(*user); + if (loopvar.post->origin() == loopvar.pre) { // pass-through auto arg_replacement = dynamic_cast( - rvsdg::SimpleNode::create_normalized(ti->node()->subregion(), op, {})[0]); - ti->argument()->divert_users(arg_replacement); - ti->output()->divert_users( + rvsdg::SimpleNode::create_normalized(theta->subregion(), op, {})[0]); + loopvar.pre->divert_users(arg_replacement); + loopvar.output->divert_users( rvsdg::SimpleNode::create_normalized(out->region(), op, {})[0]); distribute_constant(op, arg_replacement); - arg->region()->RemoveResult(res->index()); - arg->region()->RemoveArgument(arg->index()); - arg->region()->node()->RemoveInput(arg->input()->index()); - arg->region()->node()->RemoveOutput(res->output()->index()); + theta->subregion()->RemoveResult(loopvar.post->index()); + theta->subregion()->RemoveArgument(loopvar.pre->index()); + theta->RemoveInput(loopvar.input->index()); + theta->RemoveOutput(loopvar.output->index()); changed = true; break; } } - if (auto gammaNode = dynamic_cast(node)) + if (auto gammaNode = rvsdg::TryGetOwnerNode(*user)) { if (gammaNode->predicate() == user) { diff --git a/jlm/hls/backend/rvsdg2rhls/mem-queue.cpp b/jlm/hls/backend/rvsdg2rhls/mem-queue.cpp index 67ed15714..c3b2609a9 100644 --- a/jlm/hls/backend/rvsdg2rhls/mem-queue.cpp +++ b/jlm/hls/backend/rvsdg2rhls/mem-queue.cpp @@ -213,7 +213,7 @@ separate_load_edge( auto loop_node = jlm::util::AssertedCast(sti->node()); jlm::rvsdg::output * 
buffer; - addr_edge = loop_node->add_loopvar(addr_edge, &buffer); + addr_edge = loop_node->AddLoopVar(addr_edge, &buffer); addr_edge_user->divert_to(addr_edge); mem_edge = find_loop_output(sti); auto sti_arg = sti->arguments.first(); diff --git a/jlm/hls/backend/rvsdg2rhls/mem-sep.cpp b/jlm/hls/backend/rvsdg2rhls/mem-sep.cpp index 883175cf8..0b18e52f2 100644 --- a/jlm/hls/backend/rvsdg2rhls/mem-sep.cpp +++ b/jlm/hls/backend/rvsdg2rhls/mem-sep.cpp @@ -116,9 +116,9 @@ route_through(rvsdg::Region * target, jlm::rvsdg::output * response) } else if (auto tn = dynamic_cast(target->node())) { - auto lv = tn->add_loopvar(parent_response); - parrent_user->divert_to(lv); - return lv->argument(); + auto lv = tn->AddLoopVar(parent_response); + parrent_user->divert_to(lv.output); + return lv.pre; } JLM_UNREACHABLE("THIS SHOULD NOT HAPPEN"); } @@ -183,13 +183,12 @@ trace_edge( JLM_ASSERT(new_edge->nusers() == 1); auto user = *common_edge->begin(); auto new_next = *new_edge->begin(); - auto node = rvsdg::input::GetNode(*user); if (auto res = dynamic_cast(user)) { // end of region reached return res; } - else if (auto gammaNode = dynamic_cast(node)) + else if (auto gammaNode = rvsdg::TryGetOwnerNode(*user)) { auto ip = gammaNode->AddEntryVar(new_edge); std::vector vec; @@ -208,13 +207,13 @@ trace_edge( common_edge = subres->output(); } } - else if (auto ti = dynamic_cast(user)) + else if (auto theta = rvsdg::TryGetOwnerNode(*user)) { - auto tn = ti->node(); - auto lv = tn->add_loopvar(new_edge); - trace_edge(ti->argument(), lv->argument(), load_nodes, store_nodes, decouple_nodes); - common_edge = ti->output(); - new_edge = lv; + auto olv = theta->MapInputLoopVar(*user); + auto lv = theta->AddLoopVar(new_edge); + trace_edge(olv.pre, lv.pre, load_nodes, store_nodes, decouple_nodes); + common_edge = olv.output; + new_edge = lv.output; new_next->divert_to(new_edge); } else if (auto si = dynamic_cast(user)) diff --git a/jlm/hls/backend/rvsdg2rhls/rvsdg2rhls.cpp 
b/jlm/hls/backend/rvsdg2rhls/rvsdg2rhls.cpp index 3567f468b..f5e1dfaff 100644 --- a/jlm/hls/backend/rvsdg2rhls/rvsdg2rhls.cpp +++ b/jlm/hls/backend/rvsdg2rhls/rvsdg2rhls.cpp @@ -113,9 +113,9 @@ trace_call(jlm::rvsdg::input * input) auto argument = dynamic_cast(input->origin()); const jlm::rvsdg::output * result; - if (auto to = dynamic_cast(input->origin())) + if (auto theta = rvsdg::TryGetOwnerNode(*input->origin())) { - result = trace_call(to->input()); + result = trace_call(theta->MapOutputLoopVar(*input->origin()).input); } else if (argument == nullptr) { diff --git a/jlm/hls/ir/hls.cpp b/jlm/hls/ir/hls.cpp index afb5eb0e1..6628ce87c 100644 --- a/jlm/hls/ir/hls.cpp +++ b/jlm/hls/ir/hls.cpp @@ -66,7 +66,7 @@ ExitResult::Copy(rvsdg::output & origin, rvsdg::StructuralOutput * output) } rvsdg::StructuralOutput * -loop_node::add_loopvar(jlm::rvsdg::output * origin, jlm::rvsdg::output ** buffer) +loop_node::AddLoopVar(jlm::rvsdg::output * origin, jlm::rvsdg::output ** buffer) { auto input = rvsdg::StructuralInput::create(this, origin, origin->Type()); auto output = rvsdg::StructuralOutput::create(this, origin->Type()); diff --git a/jlm/hls/ir/hls.hpp b/jlm/hls/ir/hls.hpp index b67444dfc..997e8ddf4 100644 --- a/jlm/hls/ir/hls.hpp +++ b/jlm/hls/ir/hls.hpp @@ -787,7 +787,7 @@ class loop_node final : public rvsdg::StructuralNode add_backedge(std::shared_ptr type); rvsdg::StructuralOutput * - add_loopvar(jlm::rvsdg::output * origin, jlm::rvsdg::output ** buffer = nullptr); + AddLoopVar(jlm::rvsdg::output * origin, jlm::rvsdg::output ** buffer = nullptr); jlm::rvsdg::output * add_loopconst(jlm::rvsdg::output * origin); diff --git a/jlm/hls/opt/cne.cpp b/jlm/hls/opt/cne.cpp index 94745c79c..cbf238733 100644 --- a/jlm/hls/opt/cne.cpp +++ b/jlm/hls/opt/cne.cpp @@ -183,33 +183,40 @@ congruent(jlm::rvsdg::output * o1, jlm::rvsdg::output * o2, vset & vs, cnectx & if (o1->type() != o2->type()) return false; - if (is(o1) && is(o2)) + if (auto theta1 = 
rvsdg::TryGetRegionParentNode(*o1)) { - JLM_ASSERT(o1->region()->node() == o2->region()->node()); - auto a1 = static_cast(o1); - auto a2 = static_cast(o2); - vs.insert(a1, a2); - auto i1 = a1->input(), i2 = a2->input(); - if (!congruent(a1->input()->origin(), a2->input()->origin(), vs, ctx)) - return false; + if (auto theta2 = rvsdg::TryGetRegionParentNode(*o2)) + { + JLM_ASSERT(o1->region()->node() == o2->region()->node()); + auto loopvar1 = theta1->MapPreLoopVar(*o1); + auto loopvar2 = theta2->MapPreLoopVar(*o2); + vs.insert(o1, o2); + auto i1 = loopvar1.input, i2 = loopvar2.input; + if (!congruent(loopvar1.input->origin(), loopvar2.input->origin(), vs, ctx)) + return false; - auto output1 = o1->region()->node()->output(i1->index()); - auto output2 = o2->region()->node()->output(i2->index()); - return congruent(output1, output2, vs, ctx); + auto output1 = o1->region()->node()->output(i1->index()); + auto output2 = o2->region()->node()->output(i2->index()); + return congruent(output1, output2, vs, ctx); + } } - auto n1 = jlm::rvsdg::output::GetNode(*o1); - auto n2 = jlm::rvsdg::output::GetNode(*o2); - if (is(n1) && is(n2) && n1 == n2) + if (auto theta1 = rvsdg::TryGetOwnerNode(*o1)) { - auto so1 = static_cast(o1); - auto so2 = static_cast(o2); - vs.insert(o1, o2); - auto r1 = so1->results.first(); - auto r2 = so2->results.first(); - return congruent(r1->origin(), r2->origin(), vs, ctx); + if (auto theta2 = rvsdg::TryGetOwnerNode(*o2)) + { + vs.insert(o1, o2); + auto loopvar1 = theta1->MapOutputLoopVar(*o1); + auto loopvar2 = theta2->MapOutputLoopVar(*o2); + auto r1 = loopvar1.post; + auto r2 = loopvar2.post; + return congruent(r1->origin(), r2->origin(), vs, ctx); + } } + auto n1 = jlm::rvsdg::output::GetNode(*o1); + auto n2 = jlm::rvsdg::output::GetNode(*o2); + auto a1 = dynamic_cast(o1); auto a2 = dynamic_cast(o2); if (a1 && is(a1->region()->node()) && a2 && is(a2->region()->node())) @@ -331,10 +338,12 @@ mark_theta(const rvsdg::StructuralNode * node, cnectx & 
ctx) { auto input1 = theta->input(i1); auto input2 = theta->input(i2); - if (congruent(input1->argument(), input2->argument(), ctx)) + auto loopvar1 = theta->MapInputLoopVar(*input1); + auto loopvar2 = theta->MapInputLoopVar(*input2); + if (congruent(loopvar1.pre, loopvar2.pre, ctx)) { - ctx.mark(input1->argument(), input2->argument()); - ctx.mark(input1->output(), input2->output()); + ctx.mark(loopvar1.pre, loopvar2.pre); + ctx.mark(loopvar1.output, loopvar2.output); } } } @@ -530,11 +539,10 @@ divert_theta(rvsdg::StructuralNode * node, cnectx & ctx) auto theta = static_cast(node); auto subregion = node->subregion(0); - for (const auto & lv : *theta) + for (const auto & lv : theta->GetLoopVars()) { - JLM_ASSERT(ctx.set(lv->argument())->size() == ctx.set(lv)->size()); - divert_users(lv->argument(), ctx); - divert_users(lv, ctx); + JLM_ASSERT(ctx.set(lv.pre)->size() == ctx.set(lv.output)->size()); + divert_users(lv.pre, ctx); } divert(subregion, ctx); diff --git a/jlm/hls/util/view.cpp b/jlm/hls/util/view.cpp index c1a7a8561..e296140dc 100644 --- a/jlm/hls/util/view.cpp +++ b/jlm/hls/util/view.cpp @@ -367,9 +367,10 @@ region_to_dot(rvsdg::Region * region) { dot << edge(be->argument(), be, true); } - else if (auto to = dynamic_cast(region->result(i)->output())) + else if (auto theta = rvsdg::TryGetOwnerNode(*region->result(i)->output())) { - dot << edge(to->argument(), to->result(), true); + auto loopvar = theta->MapOutputLoopVar(*region->result(i)->output()); + dot << edge(loopvar.pre, loopvar.post, true); } } diff --git a/jlm/llvm/frontend/InterProceduralGraphConversion.cpp b/jlm/llvm/frontend/InterProceduralGraphConversion.cpp index d23f15b35..36bcc4b03 100644 --- a/jlm/llvm/frontend/InterProceduralGraphConversion.cpp +++ b/jlm/llvm/frontend/InterProceduralGraphConversion.cpp @@ -765,7 +765,7 @@ Convert( * Add loop variables */ auto & demandSet = demandMap.Lookup(loopAggregationNode); - std::unordered_map thetaOutputMap; + std::unordered_map thetaLoopVarMap; for 
(auto & v : demandSet.LoopVariables().Variables()) { rvsdg::output * value = nullptr; @@ -778,8 +778,9 @@ Convert( { value = outerVariableMap.lookup(&v); } - thetaOutputMap[&v] = theta->add_loopvar(value); - thetaVariableMap.insert(&v, thetaOutputMap[&v]->argument()); + auto loopvar = theta->AddLoopVar(value); + thetaLoopVarMap[&v] = loopvar; + thetaVariableMap.insert(&v, loopvar.pre); } /* @@ -797,8 +798,8 @@ Convert( */ for (auto & v : demandSet.LoopVariables().Variables()) { - JLM_ASSERT(thetaOutputMap.find(&v) != thetaOutputMap.end()); - thetaOutputMap[&v]->result()->divert_to(thetaVariableMap.lookup(&v)); + JLM_ASSERT(thetaLoopVarMap.find(&v) != thetaLoopVarMap.end()); + thetaLoopVarMap[&v].post->divert_to(thetaVariableMap.lookup(&v)); } /* @@ -820,7 +821,7 @@ Convert( for (auto & v : demandSet.LoopVariables().Variables()) { JLM_ASSERT(outerVariableMap.contains(&v)); - outerVariableMap.insert(&v, thetaOutputMap[&v]); + outerVariableMap.insert(&v, thetaLoopVarMap[&v].output); } } diff --git a/jlm/llvm/ir/operators/call.cpp b/jlm/llvm/ir/operators/call.cpp index b77882da3..eff1c8b13 100644 --- a/jlm/llvm/ir/operators/call.cpp +++ b/jlm/llvm/ir/operators/call.cpp @@ -66,17 +66,22 @@ invariantInput( return nullptr; } -static rvsdg::ThetaInput * -invariantInput(const rvsdg::ThetaOutput & output, InvariantOutputMap & invariantOutputs) +static rvsdg::input * +invariantInput( + const rvsdg::ThetaNode & theta, + const rvsdg::output & output, + InvariantOutputMap & invariantOutputs) { - auto origin = output.result()->origin(); + auto loopvar = theta.MapOutputLoopVar(output); + + auto origin = loopvar.post->origin(); while (true) { - if (origin == output.argument()) + if (origin == loopvar.pre) { - invariantOutputs[&output] = output.input(); - return output.input(); + invariantOutputs[&output] = loopvar.input; + return loopvar.input; } if (auto input = invariantInput(*origin, invariantOutputs)) @@ -101,13 +106,13 @@ invariantInput(const rvsdg::output & output, 
InvariantOutputMap & invariantOutpu if (invariantOutputs.find(&output) != invariantOutputs.end()) return invariantOutputs[&output]; - if (auto thetaOutput = dynamic_cast(&output)) - return invariantInput(*thetaOutput, invariantOutputs); + if (auto theta = rvsdg::TryGetOwnerNode(output)) + return invariantInput(*theta, output, invariantOutputs); - if (auto thetaArgument = dynamic_cast(&output)) + if (auto theta = rvsdg::TryGetRegionParentNode(output)) { - auto thetaInput = static_cast(thetaArgument->input()); - return invariantInput(*thetaInput->output(), invariantOutputs); + auto loopvar = theta->MapPreLoopVar(output); + return invariantInput(*loopvar.output, invariantOutputs); } if (auto gamma = rvsdg::TryGetOwnerNode(output)) @@ -205,9 +210,9 @@ CallNode::TraceFunctionInput(const CallNode & callNode) continue; } - if (auto thetaOutput = dynamic_cast(origin)) + if (rvsdg::TryGetOwnerNode(*origin)) { - if (auto input = invariantInput(*thetaOutput)) + if (auto input = invariantInput(*origin)) { origin = input->origin(); continue; @@ -216,9 +221,9 @@ CallNode::TraceFunctionInput(const CallNode & callNode) return origin; } - if (auto thetaArgument = dynamic_cast(origin)) + if (rvsdg::TryGetRegionParentNode(*origin)) { - if (auto input = invariantInput(*thetaArgument)) + if (auto input = invariantInput(*origin)) { origin = input->origin(); continue; diff --git a/jlm/llvm/ir/operators/lambda.cpp b/jlm/llvm/ir/operators/lambda.cpp index f0dbabc58..099e82082 100644 --- a/jlm/llvm/ir/operators/lambda.cpp +++ b/jlm/llvm/ir/operators/lambda.cpp @@ -304,17 +304,17 @@ node::ComputeCallSummary() const continue; } - if (auto theta_input = dynamic_cast(input)) + if (auto theta = rvsdg::TryGetOwnerNode(*input)) { - auto argument = theta_input->argument(); - worklist.insert(worklist.end(), argument->begin(), argument->end()); + auto loopvar = theta->MapInputLoopVar(*input); + worklist.insert(worklist.end(), loopvar.pre->begin(), loopvar.pre->end()); continue; } - if (auto 
thetaResult = dynamic_cast(input)) + if (auto theta = rvsdg::TryGetRegionParentNode(*input)) { - auto output = thetaResult->output(); - worklist.insert(worklist.end(), output->begin(), output->end()); + auto loopvar = theta->MapPostLoopVar(*input); + worklist.insert(worklist.end(), loopvar.output->begin(), loopvar.output->end()); continue; } diff --git a/jlm/llvm/opt/DeadNodeElimination.cpp b/jlm/llvm/opt/DeadNodeElimination.cpp index d672d69a3..1945f2f6d 100644 --- a/jlm/llvm/opt/DeadNodeElimination.cpp +++ b/jlm/llvm/opt/DeadNodeElimination.cpp @@ -214,19 +214,20 @@ DeadNodeElimination::MarkOutput(const jlm::rvsdg::output & output) return; } - if (auto thetaOutput = dynamic_cast(&output)) + if (auto theta = rvsdg::TryGetOwnerNode(output)) { - MarkOutput(*thetaOutput->node()->predicate()->origin()); - MarkOutput(*thetaOutput->result()->origin()); - MarkOutput(*thetaOutput->input()->origin()); + auto loopvar = theta->MapOutputLoopVar(output); + MarkOutput(*theta->predicate()->origin()); + MarkOutput(*loopvar.post->origin()); + MarkOutput(*loopvar.input->origin()); return; } - if (auto thetaArgument = dynamic_cast(&output)) + if (auto theta = rvsdg::TryGetRegionParentNode(output)) { - auto thetaInput = util::AssertedCast(thetaArgument->input()); - MarkOutput(*thetaInput->output()); - MarkOutput(*thetaInput->origin()); + auto loopvar = theta->MapPreLoopVar(output); + MarkOutput(*loopvar.output); + MarkOutput(*loopvar.input->origin()); return; } @@ -435,16 +436,16 @@ DeadNodeElimination::SweepTheta(rvsdg::ThetaNode & thetaNode) const { auto & thetaSubregion = *thetaNode.subregion(); - auto matchOutput = [&](const rvsdg::ThetaOutput & output) + auto matchOutput = [&](const rvsdg::output & output) { - auto & argument = *output.argument(); - return !Context_->IsAlive(argument) && !Context_->IsAlive(output); + auto loopvar = thetaNode.MapOutputLoopVar(output); + return !Context_->IsAlive(*loopvar.pre) && !Context_->IsAlive(*loopvar.output); }; auto deadInputs = 
thetaNode.RemoveThetaOutputsWhere(matchOutput); SweepRegion(thetaSubregion); - auto matchInput = [&](const rvsdg::ThetaInput & input) + auto matchInput = [&](const rvsdg::input & input) { return deadInputs.Contains(&input); }; diff --git a/jlm/llvm/opt/InvariantValueRedirection.cpp b/jlm/llvm/opt/InvariantValueRedirection.cpp index 8b842532f..f02b07e0d 100644 --- a/jlm/llvm/opt/InvariantValueRedirection.cpp +++ b/jlm/llvm/opt/InvariantValueRedirection.cpp @@ -152,15 +152,15 @@ InvariantValueRedirection::RedirectGammaOutputs(rvsdg::GammaNode & gammaNode) void InvariantValueRedirection::RedirectThetaOutputs(rvsdg::ThetaNode & thetaNode) { - for (const auto & thetaOutput : thetaNode) + for (const auto & loopVar : thetaNode.GetLoopVars()) { // FIXME: In order to also redirect I/O state type variables, we need to know whether a loop // terminates. - if (rvsdg::is(thetaOutput->type())) + if (rvsdg::is(loopVar.input->type())) continue; - if (rvsdg::is_invariant(thetaOutput)) - thetaOutput->divert_users(thetaOutput->input()->origin()); + if (rvsdg::ThetaLoopVarIsInvariant(loopVar)) + loopVar.output->divert_users(loopVar.input->origin()); } } diff --git a/jlm/llvm/opt/alias-analyses/Andersen.cpp b/jlm/llvm/opt/alias-analyses/Andersen.cpp index d71a33515..0c142403e 100644 --- a/jlm/llvm/opt/alias-analyses/Andersen.cpp +++ b/jlm/llvm/opt/alias-analyses/Andersen.cpp @@ -1126,13 +1126,13 @@ Andersen::AnalyzeTheta(const rvsdg::ThetaNode & theta) { // Create a PointerObject for each argument in the inner region // And make it point to a superset of the corresponding input register - for (const auto thetaOutput : theta) + for (const auto & loopVar : theta.GetLoopVars()) { - if (!IsOrContainsPointerType(thetaOutput->type())) + if (!IsOrContainsPointerType(loopVar.input->type())) continue; - auto & inputReg = *thetaOutput->input()->origin(); - auto & innerArgumentReg = *thetaOutput->argument(); + auto & inputReg = *loopVar.input->origin(); + auto & innerArgumentReg = *loopVar.pre; 
const auto inputRegPO = Set_->GetRegisterPointerObject(inputReg); const auto innerArgumentRegPO = Set_->CreateRegisterPointerObject(innerArgumentReg); @@ -1144,14 +1144,14 @@ Andersen::AnalyzeTheta(const rvsdg::ThetaNode & theta) // Iterate over loop variables again, making the inner arguments point to a superset // of what the corresponding result registers point to - for (const auto thetaOutput : theta) + for (const auto & loopVar : theta.GetLoopVars()) { - if (!IsOrContainsPointerType(thetaOutput->type())) + if (!IsOrContainsPointerType(loopVar.input->type())) continue; - auto & innerArgumentReg = *thetaOutput->argument(); - auto & innerResultReg = *thetaOutput->result()->origin(); - auto & outputReg = *thetaOutput; + auto & innerArgumentReg = *loopVar.pre; + auto & innerResultReg = *loopVar.post->origin(); + auto & outputReg = *loopVar.output; const auto innerArgumentRegPO = Set_->GetRegisterPointerObject(innerArgumentReg); const auto innerResultRegPO = Set_->GetRegisterPointerObject(innerResultReg); diff --git a/jlm/llvm/opt/alias-analyses/MemoryStateEncoder.cpp b/jlm/llvm/opt/alias-analyses/MemoryStateEncoder.cpp index 81f02bb8c..29f508897 100644 --- a/jlm/llvm/opt/alias-analyses/MemoryStateEncoder.cpp +++ b/jlm/llvm/opt/alias-analyses/MemoryStateEncoder.cpp @@ -899,20 +899,20 @@ MemoryStateEncoder::EncodeTheta(rvsdg::ThetaNode & thetaNode) Context_->GetRegionalizedStateMap().PopRegion(*thetaNode.subregion()); } -std::vector +std::vector MemoryStateEncoder::EncodeThetaEntry(rvsdg::ThetaNode & thetaNode) { auto region = thetaNode.region(); auto & stateMap = Context_->GetRegionalizedStateMap(); auto & memoryNodes = Context_->GetMemoryNodeProvisioning().GetThetaEntryExitNodes(thetaNode); - std::vector thetaStateOutputs; + std::vector thetaStateOutputs; auto memoryNodeStatePairs = stateMap.GetStates(*region, memoryNodes); for (auto & memoryNodeStatePair : memoryNodeStatePairs) { - auto thetaStateOutput = thetaNode.add_loopvar(&memoryNodeStatePair->State()); - 
stateMap.InsertState(memoryNodeStatePair->MemoryNode(), *thetaStateOutput->argument()); - thetaStateOutputs.push_back(thetaStateOutput); + auto loopvar = thetaNode.AddLoopVar(&memoryNodeStatePair->State()); + stateMap.InsertState(memoryNodeStatePair->MemoryNode(), *loopvar.pre); + thetaStateOutputs.push_back(loopvar.output); } return thetaStateOutputs; @@ -921,7 +921,7 @@ MemoryStateEncoder::EncodeThetaEntry(rvsdg::ThetaNode & thetaNode) void MemoryStateEncoder::EncodeThetaExit( rvsdg::ThetaNode & thetaNode, - const std::vector & thetaStateOutputs) + const std::vector & thetaStateOutputs) { auto subregion = thetaNode.subregion(); auto & stateMap = Context_->GetRegionalizedStateMap(); @@ -934,10 +934,11 @@ MemoryStateEncoder::EncodeThetaExit( auto thetaStateOutput = thetaStateOutputs[n]; auto & memoryNodeStatePair = memoryNodeStatePairs[n]; auto & memoryNode = memoryNodeStatePair->MemoryNode(); - JLM_ASSERT(thetaStateOutput->input()->origin() == &memoryNodeStatePair->State()); + auto loopvar = thetaNode.MapOutputLoopVar(*thetaStateOutput); + JLM_ASSERT(loopvar.input->origin() == &memoryNodeStatePair->State()); auto & subregionState = stateMap.GetState(*subregion, memoryNode)->State(); - thetaStateOutput->result()->divert_to(&subregionState); + loopvar.post->divert_to(&subregionState); memoryNodeStatePair->ReplaceState(*thetaStateOutput); } } diff --git a/jlm/llvm/opt/alias-analyses/MemoryStateEncoder.hpp b/jlm/llvm/opt/alias-analyses/MemoryStateEncoder.hpp index 9929f2bb2..63833aa73 100644 --- a/jlm/llvm/opt/alias-analyses/MemoryStateEncoder.hpp +++ b/jlm/llvm/opt/alias-analyses/MemoryStateEncoder.hpp @@ -157,13 +157,13 @@ class MemoryStateEncoder final void EncodeTheta(rvsdg::ThetaNode & thetaNode); - std::vector + std::vector EncodeThetaEntry(rvsdg::ThetaNode & thetaNode); void EncodeThetaExit( rvsdg::ThetaNode & thetaNode, - const std::vector & thetaStateOutputs); + const std::vector & thetaStateOutputs); /** * Replace \p loadNode with a new copy that takes the 
provided \p memoryStates. All users of the diff --git a/jlm/llvm/opt/alias-analyses/Steensgaard.cpp b/jlm/llvm/opt/alias-analyses/Steensgaard.cpp index ebf3ff311..9ec56963d 100644 --- a/jlm/llvm/opt/alias-analyses/Steensgaard.cpp +++ b/jlm/llvm/opt/alias-analyses/Steensgaard.cpp @@ -236,13 +236,13 @@ class RegisterLocation final : public Location return jlm::util::strfmt(dbgstr, ":arg", index); } - if (is(Output_)) + if (rvsdg::TryGetRegionParentNode(*Output_)) { auto dbgstr = Output_->region()->node()->GetOperation().debug_string(); return jlm::util::strfmt(dbgstr, ":arg", index); } - if (is(Output_)) + if (rvsdg::TryGetOwnerNode(*Output_)) { auto dbgstr = jlm::rvsdg::output::GetNode(*Output_)->GetOperation().debug_string(); return jlm::util::strfmt(dbgstr, ":out", index); @@ -1658,12 +1658,12 @@ Steensgaard::AnalyzeGamma(const rvsdg::GammaNode & node) void Steensgaard::AnalyzeTheta(const rvsdg::ThetaNode & theta) { - for (auto thetaOutput : theta) + for (const auto & loopVar : theta.GetLoopVars()) { - if (HasOrContainsPointerType(*thetaOutput)) + if (HasOrContainsPointerType(*loopVar.output)) { - auto & originLocation = Context_->GetLocation(*thetaOutput->input()->origin()); - auto & argumentLocation = Context_->GetOrInsertRegisterLocation(*thetaOutput->argument()); + auto & originLocation = Context_->GetLocation(*loopVar.input->origin()); + auto & argumentLocation = Context_->GetOrInsertRegisterLocation(*loopVar.pre); Context_->Join(argumentLocation, originLocation); } @@ -1671,13 +1671,13 @@ Steensgaard::AnalyzeTheta(const rvsdg::ThetaNode & theta) AnalyzeRegion(*theta.subregion()); - for (auto thetaOutput : theta) + for (const auto & loopVar : theta.GetLoopVars()) { - if (HasOrContainsPointerType(*thetaOutput)) + if (HasOrContainsPointerType(*loopVar.output)) { - auto & originLocation = Context_->GetLocation(*thetaOutput->result()->origin()); - auto & argumentLocation = Context_->GetLocation(*thetaOutput->argument()); - auto & outputLocation = 
Context_->GetOrInsertRegisterLocation(*thetaOutput); + auto & originLocation = Context_->GetLocation(*loopVar.post->origin()); + auto & argumentLocation = Context_->GetLocation(*loopVar.pre); + auto & outputLocation = Context_->GetOrInsertRegisterLocation(*loopVar.output); Context_->Join(originLocation, argumentLocation); Context_->Join(originLocation, outputLocation); diff --git a/jlm/llvm/opt/cne.cpp b/jlm/llvm/opt/cne.cpp index def2530aa..96c364792 100644 --- a/jlm/llvm/opt/cne.cpp +++ b/jlm/llvm/opt/cne.cpp @@ -180,33 +180,40 @@ congruent(jlm::rvsdg::output * o1, jlm::rvsdg::output * o2, vset & vs, cnectx & if (o1->type() != o2->type()) return false; - if (is(o1) && is(o2)) + if (auto theta1 = rvsdg::TryGetRegionParentNode(*o1)) { - JLM_ASSERT(o1->region()->node() == o2->region()->node()); - auto a1 = static_cast(o1); - auto a2 = static_cast(o2); - vs.insert(a1, a2); - auto i1 = a1->input(), i2 = a2->input(); - if (!congruent(a1->input()->origin(), a2->input()->origin(), vs, ctx)) - return false; + if (auto theta2 = rvsdg::TryGetRegionParentNode(*o2)) + { + JLM_ASSERT(o1->region()->node() == o2->region()->node()); + auto loopvar1 = theta1->MapPreLoopVar(*o1); + auto loopvar2 = theta2->MapPreLoopVar(*o2); + vs.insert(o1, o2); + auto i1 = loopvar1.input, i2 = loopvar2.input; + if (!congruent(loopvar1.input->origin(), loopvar2.input->origin(), vs, ctx)) + return false; - auto output1 = o1->region()->node()->output(i1->index()); - auto output2 = o2->region()->node()->output(i2->index()); - return congruent(output1, output2, vs, ctx); + auto output1 = o1->region()->node()->output(i1->index()); + auto output2 = o2->region()->node()->output(i2->index()); + return congruent(output1, output2, vs, ctx); + } } - auto n1 = jlm::rvsdg::output::GetNode(*o1); - auto n2 = jlm::rvsdg::output::GetNode(*o2); - if (is(n1) && is(n2) && n1 == n2) + if (auto theta1 = rvsdg::TryGetOwnerNode(*o1)) { - auto so1 = static_cast(o1); - auto so2 = static_cast(o2); - vs.insert(o1, o2); - auto 
r1 = so1->results.first(); - auto r2 = so2->results.first(); - return congruent(r1->origin(), r2->origin(), vs, ctx); + if (auto theta2 = rvsdg::TryGetOwnerNode(*o2)) + { + vs.insert(o1, o2); + auto loopvar1 = theta1->MapOutputLoopVar(*o1); + auto loopvar2 = theta2->MapOutputLoopVar(*o2); + auto r1 = loopvar1.post; + auto r2 = loopvar2.post; + return congruent(r1->origin(), r2->origin(), vs, ctx); + } } + auto n1 = jlm::rvsdg::output::GetNode(*o1); + auto n2 = jlm::rvsdg::output::GetNode(*o2); + if (rvsdg::is(n1) && n1 == n2) { auto so1 = static_cast(o1); @@ -315,10 +322,12 @@ mark_theta(const rvsdg::StructuralNode * node, cnectx & ctx) { auto input1 = theta->input(i1); auto input2 = theta->input(i2); - if (congruent(input1->argument(), input2->argument(), ctx)) + auto loopvar1 = theta->MapInputLoopVar(*input1); + auto loopvar2 = theta->MapInputLoopVar(*input2); + if (congruent(loopvar1.pre, loopvar2.pre, ctx)) { - ctx.mark(input1->argument(), input2->argument()); - ctx.mark(input1->output(), input2->output()); + ctx.mark(loopvar1.pre, loopvar2.pre); + ctx.mark(loopvar1.output, loopvar2.output); } } } @@ -491,11 +500,11 @@ divert_theta(rvsdg::StructuralNode * node, cnectx & ctx) auto theta = static_cast(node); auto subregion = node->subregion(0); - for (const auto & lv : *theta) + for (const auto & lv : theta->GetLoopVars()) { - JLM_ASSERT(ctx.set(lv->argument())->size() == ctx.set(lv)->size()); - divert_users(lv->argument(), ctx); - divert_users(lv, ctx); + JLM_ASSERT(ctx.set(lv.pre)->size() == ctx.set(lv.output)->size()); + divert_users(lv.pre, ctx); + divert_users(lv.output, ctx); } divert(subregion, ctx); diff --git a/jlm/llvm/opt/inlining.cpp b/jlm/llvm/opt/inlining.cpp index e24406333..ec9aba8b0 100644 --- a/jlm/llvm/opt/inlining.cpp +++ b/jlm/llvm/opt/inlining.cpp @@ -78,7 +78,7 @@ route_to_region(jlm::rvsdg::output * output, rvsdg::Region * region) } else if (auto theta = dynamic_cast(region->node())) { - output = theta->add_loopvar(output)->argument(); + 
output = theta->AddLoopVar(output).pre; } else if (auto lambda = dynamic_cast(region->node())) { diff --git a/jlm/llvm/opt/inversion.cpp b/jlm/llvm/opt/inversion.cpp index e75d0cb7a..a0cbce507 100644 --- a/jlm/llvm/opt/inversion.cpp +++ b/jlm/llvm/opt/inversion.cpp @@ -77,14 +77,14 @@ static void pullin(rvsdg::GammaNode * gamma, rvsdg::ThetaNode * theta) { pullin_bottom(gamma); - for (const auto & lv : *theta) + for (const auto & lv : theta->GetLoopVars()) { - if (jlm::rvsdg::output::GetNode(*lv->result()->origin()) != gamma) + if (jlm::rvsdg::output::GetNode(*lv.post->origin()) != gamma) { - auto ev = gamma->AddEntryVar(lv->result()->origin()); + auto ev = gamma->AddEntryVar(lv.post->origin()); JLM_ASSERT(ev.branchArgument.size() == 2); auto xv = gamma->AddExitVar({ ev.branchArgument[0], ev.branchArgument[1] }).output; - lv->result()->divert_to(xv); + lv.post->divert_to(xv); } } pullin_top(gamma); @@ -124,16 +124,16 @@ copy_condition_nodes( } } -static rvsdg::RegionArgument * -to_argument(jlm::rvsdg::output * output) +static jlm::rvsdg::StructuralOutput * +to_structural_output(jlm::rvsdg::output * output) { - return dynamic_cast(output); + return dynamic_cast(output); } -static rvsdg::StructuralOutput * -to_structural_output(jlm::rvsdg::output * output) +static rvsdg::RegionArgument * +to_argument(jlm::rvsdg::output * output) { - return dynamic_cast(output); + return dynamic_cast(output); } static void @@ -148,8 +148,8 @@ invert(rvsdg::ThetaNode * otheta) /* copy condition nodes for new gamma node */ rvsdg::SubstitutionMap smap; auto cnodes = collect_condition_nodes(otheta, ogamma); - for (const auto & olv : *otheta) - smap.insert(olv->argument(), olv->input()->origin()); + for (const auto & olv : otheta->GetLoopVars()) + smap.insert(olv.pre, olv.input->origin()); copy_condition_nodes(otheta->region(), smap, cnodes); auto ngamma = @@ -179,11 +179,11 @@ invert(rvsdg::ThetaNode * otheta) osubregion0->copy(ngamma->subregion(0), r0map, false, false); /* update 
substitution map for insertion of exit variables */ - for (const auto & olv : *otheta) + for (const auto & olv : otheta->GetLoopVars()) { - auto output = to_structural_output(olv->result()->origin()); + auto output = to_structural_output(olv.post->origin()); auto substitute = r0map.lookup(osubregion0->result(output->index())->origin()); - r0map.insert(olv->result()->origin(), substitute); + r0map.insert(olv.post->origin(), substitute); } } @@ -195,25 +195,25 @@ invert(rvsdg::ThetaNode * otheta) /* add loop variables to new theta node and setup substitution map */ auto osubregion0 = ogamma->subregion(0); auto osubregion1 = ogamma->subregion(1); - std::unordered_map nlvs; - for (const auto & olv : *otheta) + std::unordered_map nlvs; + for (const auto & olv : otheta->GetLoopVars()) { - auto ev = ngamma->AddEntryVar(olv->input()->origin()); - auto nlv = ntheta->add_loopvar(ev.branchArgument[1]); - r1map.insert(olv->argument(), nlv->argument()); - nlvs[olv->input()] = nlv; + auto ev = ngamma->AddEntryVar(olv.input->origin()); + auto nlv = ntheta->AddLoopVar(ev.branchArgument[1]); + r1map.insert(olv.pre, nlv.pre); + nlvs[olv.input] = nlv; } for (const auto & oev : ogamma->GetEntryVars()) { if (auto argument = to_argument(oev.input->origin())) { - r1map.insert(oev.branchArgument[1], nlvs[argument->input()]->argument()); + r1map.insert(oev.branchArgument[1], nlvs[argument->input()].pre); } else { auto ev = ngamma->AddEntryVar(smap.lookup(oev.input->origin())); - auto nlv = ntheta->add_loopvar(ev.branchArgument[1]); - r1map.insert(oev.branchArgument[1], nlv->argument()); + auto nlv = ntheta->AddLoopVar(ev.branchArgument[1]); + r1map.insert(oev.branchArgument[1], nlv.pre); nlvs[oev.input] = nlv; } } @@ -222,11 +222,11 @@ invert(rvsdg::ThetaNode * otheta) osubregion1->copy(ntheta->subregion(), r1map, false, false); /* adjust values in substitution map for condition node copying */ - for (const auto & olv : *otheta) + for (const auto & olv : otheta->GetLoopVars()) { - auto 
output = to_structural_output(olv->result()->origin()); + auto output = to_structural_output(olv.post->origin()); auto substitute = r1map.lookup(osubregion1->result(output->index())->origin()); - r1map.insert(olv->argument(), substitute); + r1map.insert(olv.pre, substitute); } /* copy condition nodes */ @@ -234,24 +234,24 @@ invert(rvsdg::ThetaNode * otheta) auto predicate = r1map.lookup(ogamma->predicate()->origin()); /* redirect results of loop variables and adjust substitution map for exit region copying */ - for (const auto & olv : *otheta) + for (const auto & olv : otheta->GetLoopVars()) { - auto output = to_structural_output(olv->result()->origin()); + auto output = to_structural_output(olv.post->origin()); auto substitute = r1map.lookup(osubregion1->result(output->index())->origin()); - nlvs[olv->input()]->result()->divert_to(substitute); - r1map.insert(olv->result()->origin(), nlvs[olv->input()]); + nlvs[olv.input].post->divert_to(substitute); + r1map.insert(olv.post->origin(), nlvs[olv.input].output); } for (const auto & oev : ogamma->GetEntryVars()) { if (auto argument = to_argument(oev.input->origin())) { - r1map.insert(oev.branchArgument[0], nlvs[argument->input()]); + r1map.insert(oev.branchArgument[0], nlvs[argument->input()].output); } else { auto substitute = r1map.lookup(oev.input->origin()); - nlvs[oev.input]->result()->divert_to(substitute); - r1map.insert(oev.branchArgument[0], nlvs[oev.input]); + nlvs[oev.input].post->divert_to(substitute); + r1map.insert(oev.branchArgument[0], nlvs[oev.input].output); } } @@ -261,26 +261,26 @@ invert(rvsdg::ThetaNode * otheta) osubregion0->copy(ngamma->subregion(1), r1map, false, false); /* adjust values in substitution map for exit variable creation */ - for (const auto & olv : *otheta) + for (const auto & olv : otheta->GetLoopVars()) { - auto output = to_structural_output(olv->result()->origin()); + auto output = to_structural_output(olv.post->origin()); auto substitute = 
r1map.lookup(osubregion0->result(output->index())->origin()); - r1map.insert(olv->result()->origin(), substitute); + r1map.insert(olv.post->origin(), substitute); } } /* add exit variables to new gamma */ - for (const auto & olv : *otheta) + for (const auto & olv : otheta->GetLoopVars()) { - auto o0 = r0map.lookup(olv->result()->origin()); - auto o1 = r1map.lookup(olv->result()->origin()); - auto ex = ngamma->AddExitVar({ o0, o1 }).output; - smap.insert(olv, ex); + auto o0 = r0map.lookup(olv.post->origin()); + auto o1 = r1map.lookup(olv.post->origin()); + auto ex = ngamma->AddExitVar({ o0, o1 }); + smap.insert(olv.output, ex.output); } /* replace outputs */ - for (const auto & olv : *otheta) - olv->divert_users(smap.lookup(olv)); + for (const auto & olv : otheta->GetLoopVars()) + olv.output->divert_users(smap.lookup(olv.output)); remove(otheta); } diff --git a/jlm/llvm/opt/push.cpp b/jlm/llvm/opt/push.cpp index aedb52ac8..abba1f5ec 100644 --- a/jlm/llvm/opt/push.cpp +++ b/jlm/llvm/opt/push.cpp @@ -123,7 +123,7 @@ copy_from_gamma(rvsdg::Node * node, size_t r) return arguments; } -static std::vector +static std::vector copy_from_theta(rvsdg::Node * node) { JLM_ASSERT(is(node->region()->node())); @@ -140,13 +140,13 @@ copy_from_theta(rvsdg::Node * node) operands.push_back(argument->input()->origin()); } - std::vector arguments; + std::vector arguments; auto copy = node->copy(target, operands); for (size_t n = 0; n < copy->noutputs(); n++) { - auto lv = theta->add_loopvar(copy->output(n)); - node->output(n)->divert_users(lv->argument()); - arguments.push_back(lv->argument()); + auto lv = theta->AddLoopVar(copy->output(n)); + node->output(n)->divert_users(lv.pre); + arguments.push_back(lv.pre); } return arguments; @@ -210,9 +210,7 @@ push(rvsdg::GammaNode * gamma) } static bool -is_theta_invariant( - const rvsdg::Node * node, - const std::unordered_set & invariants) +is_theta_invariant(const rvsdg::Node * node, const std::unordered_set & invariants) { 
JLM_ASSERT(is(node->region()->node())); JLM_ASSERT(node->depth() == 0); @@ -241,18 +239,18 @@ push_top(rvsdg::ThetaNode * theta) } /* collect loop invariant arguments */ - std::unordered_set invariants; - for (const auto & lv : *theta) + std::unordered_set invariants; + for (const auto & lv : theta->GetLoopVars()) { - if (lv->result()->origin() == lv->argument()) - invariants.insert(lv->argument()); + if (lv.post->origin() == lv.pre) + invariants.insert(lv.pre); } /* initialize worklist */ worklist wl; - for (const auto & lv : *theta) + for (const auto & lv : theta->GetLoopVars()) { - auto argument = lv->argument(); + auto argument = lv.pre; for (const auto & user : *argument) { auto tmp = jlm::rvsdg::input::GetNode(*user); @@ -334,8 +332,8 @@ pushout_store(rvsdg::Node * storenode) auto ovalue = storenode->input(1)->origin(); /* insert new value for store */ - auto nvalue = theta->add_loopvar(UndefValueOperation::Create(*theta->region(), ovalue->Type())); - nvalue->result()->divert_to(ovalue); + auto nvalue = theta->AddLoopVar(UndefValueOperation::Create(*theta->region(), ovalue->Type())); + nvalue.post->divert_to(ovalue); /* collect store operands */ std::vector states; @@ -349,7 +347,8 @@ pushout_store(rvsdg::Node * storenode) } /* create new store and redirect theta output users */ - auto nstates = StoreNonVolatileNode::Create(address, nvalue, states, storeop->GetAlignment()); + auto nstates = + StoreNonVolatileNode::Create(address, nvalue.output, states, storeop->GetAlignment()); for (size_t n = 0; n < states.size(); n++) { std::unordered_set users; @@ -369,9 +368,9 @@ pushout_store(rvsdg::Node * storenode) void push_bottom(rvsdg::ThetaNode * theta) { - for (const auto & lv : *theta) + for (const auto & lv : theta->GetLoopVars()) { - auto storenode = jlm::rvsdg::output::GetNode(*lv->result()->origin()); + auto storenode = jlm::rvsdg::output::GetNode(*lv.post->origin()); if (jlm::rvsdg::is(storenode) && is_movable_store(storenode)) { pushout_store(storenode); 
diff --git a/jlm/llvm/opt/unroll.cpp b/jlm/llvm/opt/unroll.cpp index f5a7b62c9..32bb7ec63 100644 --- a/jlm/llvm/opt/unroll.cpp +++ b/jlm/llvm/opt/unroll.cpp @@ -65,14 +65,15 @@ is_theta_invariant(const jlm::rvsdg::output * output) if (jlm::rvsdg::is(jlm::rvsdg::output::GetNode(*output))) return true; - auto argument = dynamic_cast(output); - if (!argument) + auto theta = rvsdg::TryGetRegionParentNode(*output); + if (!theta) return false; - return is_invariant(static_cast(argument->input())); + auto loopVar = theta->MapPreLoopVar(*output); + return ThetaLoopVarIsInvariant(loopVar); } -static rvsdg::RegionArgument * +static rvsdg::output * push_from_theta(jlm::rvsdg::output * output) { auto argument = dynamic_cast(output); @@ -85,10 +86,10 @@ push_from_theta(jlm::rvsdg::output * output) auto theta = static_cast(tmp->region()->node()); auto node = tmp->copy(theta->region(), {}); - auto lv = theta->add_loopvar(node->output(0)); - output->divert_users(lv->argument()); + auto lv = theta->AddLoopVar(node->output(0)); + output->divert_users(lv.pre); - return lv->argument(); + return lv.pre; } static bool @@ -99,12 +100,13 @@ is_idv(jlm::rvsdg::input * input) auto node = rvsdg::input::GetNode(*input); JLM_ASSERT(is(node) || is(node)); - auto a = dynamic_cast(input->origin()); - if (!a) - return false; + if (auto theta = rvsdg::TryGetRegionParentNode(*input->origin())) + { + auto loopvar = theta->MapPreLoopVar(*input->origin()); + return jlm::rvsdg::output::GetNode(*loopvar.post->origin()) == node; + } - auto tinput = static_cast(a->input()); - return jlm::rvsdg::output::GetNode(*tinput->result()->origin()) == node; + return false; } std::unique_ptr @@ -184,8 +186,8 @@ unroll_body( { theta->subregion()->copy(target, smap, false, false); rvsdg::SubstitutionMap tmap; - for (const auto & olv : *theta) - tmap.insert(olv->argument(), smap.lookup(olv->result()->origin())); + for (const auto & olv : theta->GetLoopVars()) + tmap.insert(olv.pre, smap.lookup(olv.post->origin())); smap 
= tmap; } theta->subregion()->copy(target, smap, false, false); @@ -200,13 +202,13 @@ static void copy_body_and_unroll(const rvsdg::ThetaNode * theta, size_t factor) { rvsdg::SubstitutionMap smap; - for (const auto & olv : *theta) - smap.insert(olv->argument(), olv->input()->origin()); + for (const auto & olv : theta->GetLoopVars()) + smap.insert(olv.pre, olv.input->origin()); unroll_body(theta, theta->region(), smap, factor); - for (const auto & olv : *theta) - olv->divert_users(smap.lookup(olv->result()->origin())); + for (const auto & olv : theta->GetLoopVars()) + olv.output->divert_users(smap.lookup(olv.post->origin())); } /* @@ -219,20 +221,24 @@ unroll_theta(const unrollinfo & ui, rvsdg::SubstitutionMap & smap, size_t factor auto remainder = ui.remainder(factor); auto unrolled_theta = rvsdg::ThetaNode::create(theta->region()); - for (const auto & olv : *theta) + auto oldLoopVars = theta->GetLoopVars(); + for (const auto & olv : oldLoopVars) { - auto nlv = unrolled_theta->add_loopvar(olv->input()->origin()); - smap.insert(olv->argument(), nlv->argument()); + auto nlv = unrolled_theta->AddLoopVar(olv.input->origin()); + smap.insert(olv.pre, nlv.pre); } unroll_body(theta, unrolled_theta->subregion(), smap, factor); unrolled_theta->set_predicate(smap.lookup(theta->predicate()->origin())); - for (auto olv = theta->begin(), nlv = unrolled_theta->begin(); olv != theta->end(); olv++, nlv++) + auto newLoopVars = unrolled_theta->GetLoopVars(); + for (size_t i = 0; i < oldLoopVars.size(); ++i) { - auto origin = smap.lookup((*olv)->result()->origin()); - (*nlv)->result()->divert_to(origin); - smap.insert(*olv, *nlv); + const auto & olv = oldLoopVars[i]; + const auto & nlv = newLoopVars[i]; + auto origin = smap.lookup(olv.post->origin()); + nlv.post->divert_to(origin); + smap.insert(olv.output, nlv.output); } if (remainder != 0) @@ -270,8 +276,8 @@ add_remainder(const unrollinfo & ui, rvsdg::SubstitutionMap & smap, size_t facto We only need to redirect the users of the 
outputs of the old theta node to the outputs of the new theta node, as there are no residual iterations. */ - for (const auto & olv : *theta) - olv->divert_users(smap.lookup(olv)); + for (const auto & olv : theta->GetLoopVars()) + olv.output->divert_users(smap.lookup(olv.output)); return remove(theta); } @@ -280,8 +286,8 @@ add_remainder(const unrollinfo & ui, rvsdg::SubstitutionMap & smap, size_t facto redirecting the inputs of the old theta to the outputs of the unrolled theta. */ - for (const auto & olv : *theta) - olv->input()->divert_to(smap.lookup(olv)); + for (const auto & olv : theta->GetLoopVars()) + olv.input->divert_to(smap.lookup(olv.output)); if (remainder == 1) { @@ -332,8 +338,8 @@ create_unrolled_gamma_predicate(const unrollinfo & ui, size_t factor) { auto region = ui.theta()->region(); auto nbits = ui.nbits(); - auto step = ui.step()->input()->origin(); - auto end = ui.end()->input()->origin(); + auto step = ui.theta()->MapPreLoopVar(*ui.step()).input->origin(); + auto end = ui.theta()->MapPreLoopVar(*ui.end()).input->origin(); auto uf = jlm::rvsdg::create_bitconstant(region, nbits, factor); auto mul = jlm::rvsdg::bitmul_op::create(nbits, step, uf); @@ -380,8 +386,8 @@ static jlm::rvsdg::output * create_residual_gamma_predicate(const rvsdg::SubstitutionMap & smap, const unrollinfo & ui) { auto region = ui.theta()->region(); - auto idv = smap.lookup(ui.theta()->output(ui.idv()->input()->index())); - auto end = ui.end()->input()->origin(); + auto idv = smap.lookup(ui.theta()->MapPreLoopVar(*ui.idv()).output); + auto end = ui.theta()->MapPreLoopVar(*ui.end()).input->origin(); /* FIXME: order of operands */ auto cmp = jlm::rvsdg::SimpleNode::create_normalized(region, ui.cmpoperation(), { idv, end })[0]; @@ -403,29 +409,34 @@ unroll_unknown_theta(const unrollinfo & ui, size_t factor) auto ntheta = rvsdg::ThetaNode::create(ngamma->subregion(1)); rvsdg::SubstitutionMap rmap[2]; - for (const auto & olv : *otheta) + for (const auto & olv : 
otheta->GetLoopVars()) { - auto ev = ngamma->AddEntryVar(olv->input()->origin()); - auto nlv = ntheta->add_loopvar(ev.branchArgument[1]); - rmap[0].insert(olv, ev.branchArgument[0]); - rmap[1].insert(olv->argument(), nlv->argument()); + auto ev = ngamma->AddEntryVar(olv.input->origin()); + auto nlv = ntheta->AddLoopVar(ev.branchArgument[1]); + rmap[0].insert(olv.output, ev.branchArgument[0]); + rmap[1].insert(olv.pre, nlv.pre); } unroll_body(otheta, ntheta->subregion(), rmap[1], factor); pred = create_unrolled_theta_predicate(ntheta->subregion(), rmap[1], ui, factor); ntheta->set_predicate(pred); - for (auto olv = otheta->begin(), nlv = ntheta->begin(); olv != otheta->end(); olv++, nlv++) + auto oldLoopVars = otheta->GetLoopVars(); + auto newLoopVars = ntheta->GetLoopVars(); + for (std::size_t n = 0; n < oldLoopVars.size(); ++n) { - auto origin = rmap[1].lookup((*olv)->result()->origin()); - (*nlv)->result()->divert_to(origin); - rmap[1].insert(*olv, *nlv); + auto & olv = oldLoopVars[n]; + auto & nlv = newLoopVars[n]; + auto origin = rmap[1].lookup(olv.post->origin()); + nlv.post->divert_to(origin); + rmap[1].insert(olv.output, nlv.output); } - for (const auto & olv : *otheta) + for (const auto & olv : oldLoopVars) { - auto xv = ngamma->AddExitVar({ rmap[0].lookup(olv), rmap[1].lookup(olv) }).output; - smap.insert(olv, xv); + auto xv = + ngamma->AddExitVar({ rmap[0].lookup(olv.output), rmap[1].lookup(olv.output) }).output; + smap.insert(olv.output, xv); } } @@ -436,27 +447,32 @@ unroll_unknown_theta(const unrollinfo & ui, size_t factor) auto ntheta = rvsdg::ThetaNode::create(ngamma->subregion(1)); rvsdg::SubstitutionMap rmap[2]; - for (const auto & olv : *otheta) + auto oldLoopVars = otheta->GetLoopVars(); + for (const auto & olv : oldLoopVars) { - auto ev = ngamma->AddEntryVar(smap.lookup(olv)); - auto nlv = ntheta->add_loopvar(ev.branchArgument[1]); - rmap[0].insert(olv, ev.branchArgument[0]); - rmap[1].insert(olv->argument(), nlv->argument()); + auto ev = 
ngamma->AddEntryVar(smap.lookup(olv.output)); + auto nlv = ntheta->AddLoopVar(ev.branchArgument[1]); + rmap[0].insert(olv.output, ev.branchArgument[0]); + rmap[1].insert(olv.pre, nlv.pre); } otheta->subregion()->copy(ntheta->subregion(), rmap[1], false, false); ntheta->set_predicate(rmap[1].lookup(otheta->predicate()->origin())); - for (auto olv = otheta->begin(), nlv = ntheta->begin(); olv != otheta->end(); olv++, nlv++) + auto newLoopVars = ntheta->GetLoopVars(); + + for (std::size_t n = 0; n < oldLoopVars.size(); ++n) { - auto origin = rmap[1].lookup((*olv)->result()->origin()); - (*nlv)->result()->divert_to(origin); - auto xv = ngamma->AddExitVar({ rmap[0].lookup(*olv), *nlv }).output; - smap.insert(*olv, xv); + auto & olv = oldLoopVars[n]; + auto & nlv = newLoopVars[n]; + auto origin = rmap[1].lookup(olv.post->origin()); + nlv.post->divert_to(origin); + auto xv = ngamma->AddExitVar({ rmap[0].lookup(olv.output), nlv.output }).output; + smap.insert(olv.output, xv); } } - for (const auto & olv : *otheta) - olv->divert_users(smap.lookup(olv)); + for (const auto & olv : otheta->GetLoopVars()) + olv.output->divert_users(smap.lookup(olv.output)); remove(otheta); } diff --git a/jlm/llvm/opt/unroll.hpp b/jlm/llvm/opt/unroll.hpp index 5550743ba..7fa5d3244 100644 --- a/jlm/llvm/opt/unroll.hpp +++ b/jlm/llvm/opt/unroll.hpp @@ -53,9 +53,9 @@ class unrollinfo final inline unrollinfo( rvsdg::Node * cmpnode, rvsdg::Node * armnode, - rvsdg::RegionArgument * idv, - rvsdg::RegionArgument * step, - rvsdg::RegionArgument * end) + rvsdg::output * idv, + rvsdg::output * step, + rvsdg::output * end) : end_(end), step_(step), cmpnode_(cmpnode), @@ -132,7 +132,7 @@ class unrollinfo final return *static_cast(&armnode()->GetOperation()); } - inline rvsdg::RegionArgument * + inline rvsdg::output * idv() const noexcept { return idv_; @@ -141,7 +141,7 @@ class unrollinfo final inline jlm::rvsdg::output * init() const noexcept { - return idv()->input()->origin(); + return 
theta()->MapPreLoopVar(*idv()).input->origin(); } inline const jlm::rvsdg::bitvalue_repr * @@ -150,7 +150,7 @@ class unrollinfo final return value(init()); } - inline rvsdg::RegionArgument * + inline rvsdg::output * step() const noexcept { return step_; @@ -162,7 +162,7 @@ class unrollinfo final return value(step()); } - inline rvsdg::RegionArgument * + inline rvsdg::output * end() const noexcept { return end_; @@ -224,11 +224,11 @@ class unrollinfo final return &static_cast(&p->GetOperation())->value(); } - rvsdg::RegionArgument * end_; - rvsdg::RegionArgument * step_; + rvsdg::output * end_; + rvsdg::output * step_; rvsdg::Node * cmpnode_; rvsdg::Node * armnode_; - rvsdg::RegionArgument * idv_; + rvsdg::output * idv_; }; /** diff --git a/jlm/mlir/frontend/MlirToJlmConverter.cpp b/jlm/mlir/frontend/MlirToJlmConverter.cpp index a7005e0f2..4306968ad 100644 --- a/jlm/mlir/frontend/MlirToJlmConverter.cpp +++ b/jlm/mlir/frontend/MlirToJlmConverter.cpp @@ -367,7 +367,7 @@ MlirToJlmConverter::ConvertOperation( // Add loop vars to the theta node for (size_t i = 0; i < inputs.size(); i++) { - rvsdgThetaNode->add_loopvar(inputs[i]); + rvsdgThetaNode->AddLoopVar(inputs[i]); } auto regionResults = ConvertRegion(mlirThetaNode.getRegion(), *rvsdgThetaNode->subregion()); diff --git a/jlm/rvsdg/node.cpp b/jlm/rvsdg/node.cpp index 05e43c70a..4b6be734d 100644 --- a/jlm/rvsdg/node.cpp +++ b/jlm/rvsdg/node.cpp @@ -340,16 +340,22 @@ producer(const jlm::rvsdg::output * output) noexcept if (auto node = output::GetNode(*output)) return node; + if (auto theta = TryGetRegionParentNode(*output)) + { + auto loopvar = theta->MapPreLoopVar(*output); + if (loopvar.post->origin() != output) + { + return nullptr; + } + return producer(loopvar.input->origin()); + } + JLM_ASSERT(dynamic_cast(output)); auto argument = static_cast(output); if (!argument->input()) return nullptr; - if (is(argument->region()->node()) - && (argument->region()->result(argument->index() + 1)->origin() != argument)) - 
return nullptr; - return producer(argument->input()->origin()); } diff --git a/jlm/rvsdg/theta.cpp b/jlm/rvsdg/theta.cpp index d9001654c..c6f6b7d38 100644 --- a/jlm/rvsdg/theta.cpp +++ b/jlm/rvsdg/theta.cpp @@ -24,122 +24,232 @@ ThetaOperation::copy() const return std::make_unique(*this); } +/* theta node */ + +ThetaNode::~ThetaNode() noexcept = default; + ThetaNode::ThetaNode(rvsdg::Region & parent) : StructuralNode(ThetaOperation(), &parent, 1) { auto predicate = control_false(subregion()); - ThetaPredicateResult::Create(*predicate); + RegionResult::Create(*subregion(), *predicate, nullptr, ControlType::Create(2)); } -ThetaInput::~ThetaInput() noexcept +ThetaNode::LoopVar +ThetaNode::AddLoopVar(rvsdg::output * origin) { - if (output_) - output_->input_ = nullptr; -} + Node::add_input(std::make_unique(this, origin, origin->Type())); + Node::add_output(std::make_unique(this, origin->Type())); -/* theta output */ + auto input = ThetaNode::input(ninputs() - 1); + auto output = ThetaNode::output(noutputs() - 1); + auto & thetaArgument = RegionArgument::Create(*subregion(), input, input->Type()); + auto & thetaResult = RegionResult::Create(*subregion(), thetaArgument, output, output->Type()); -ThetaOutput::~ThetaOutput() noexcept -{ - if (input_) - input_->output_ = nullptr; + return LoopVar{ input, &thetaArgument, &thetaResult, output }; } -ThetaArgument::~ThetaArgument() noexcept = default; - -ThetaArgument & -ThetaArgument::Copy(rvsdg::Region & region, StructuralInput * input) +ThetaNode * +ThetaNode::copy(rvsdg::Region * region, rvsdg::SubstitutionMap & smap) const { - auto thetaInput = util::AssertedCast(input); - return ThetaArgument::Create(region, *thetaInput); -} + auto nf = graph()->GetNodeNormalForm(typeid(Operation)); + nf->set_mutable(false); -ThetaResult::~ThetaResult() noexcept = default; + rvsdg::SubstitutionMap rmap; + auto theta = create(region); -ThetaResult & -ThetaResult::Copy(rvsdg::output & origin, StructuralOutput * output) -{ - auto thetaOutput 
= util::AssertedCast(output); - return ThetaResult::Create(origin, *thetaOutput); -} + /* add loop variables */ + std::vector oldLoopVars = GetLoopVars(); + std::vector newLoopVars; + for (auto olv : oldLoopVars) + { + auto nlv = theta->AddLoopVar(smap.lookup(olv.input->origin())); + newLoopVars.push_back(nlv); + rmap.insert(olv.pre, nlv.pre); + } -ThetaPredicateResult::~ThetaPredicateResult() noexcept = default; + /* copy subregion */ + subregion()->copy(theta->subregion(), rmap, false, false); + theta->set_predicate(rmap.lookup(predicate()->origin())); -ThetaPredicateResult & -ThetaPredicateResult::Copy(rvsdg::output & origin, StructuralOutput * output) -{ - JLM_ASSERT(output == nullptr); - return ThetaPredicateResult::Create(origin); -} + /* redirect loop variables */ + for (size_t i = 0; i < oldLoopVars.size(); ++i) + { + newLoopVars[i].post->divert_to(rmap.lookup(oldLoopVars[i].post->origin())); + smap.insert(oldLoopVars[i].output, newLoopVars[i].output); + } -/* theta node */ + nf->set_mutable(true); + return theta; +} -ThetaNode::~ThetaNode() noexcept = default; +[[nodiscard]] ThetaNode::LoopVar +ThetaNode::MapInputLoopVar(const rvsdg::input & input) const +{ + JLM_ASSERT(rvsdg::TryGetOwnerNode(input) == this); + auto peer = MapInputToOutputIndex(input.index()); + return LoopVar{ const_cast(&input), + subregion()->argument(input.index()), + peer ? subregion()->result(*peer + 1) : nullptr, + peer ? output(*peer) : nullptr }; +} -const ThetaNode::loopvar_iterator & -ThetaNode::loopvar_iterator::operator++() noexcept +[[nodiscard]] ThetaNode::LoopVar +ThetaNode::MapPreLoopVar(const rvsdg::output & argument) const { - if (output_ == nullptr) - return *this; + JLM_ASSERT(rvsdg::TryGetRegionParentNode(argument) == this); + auto peer = MapInputToOutputIndex(argument.index()); + return LoopVar{ input(argument.index()), + const_cast(&argument), + peer ? subregion()->result(*peer + 1) : nullptr, + peer ? 
output(*peer) : nullptr }; +} - auto node = output_->node(); - auto index = output_->index(); - if (index == node->noutputs() - 1) +[[nodiscard]] ThetaNode::LoopVar +ThetaNode::MapPostLoopVar(const rvsdg::input & result) const +{ + JLM_ASSERT(rvsdg::TryGetRegionParentNode(result) == this); + if (result.index() == 0) { - output_ = nullptr; - return *this; + // This is the loop continuation predicate. + // There is nothing sensible to return here. + throw std::logic_error("cannot map loop continuation predicate to loop variable"); } - - index++; - output_ = node->output(index); - return *this; + auto peer = MapOutputToInputIndex(result.index() - 1); + return LoopVar{ peer ? input(*peer) : nullptr, + peer ? subregion()->argument(*peer) : nullptr, + const_cast(&result), + output(result.index() - 1) }; } -ThetaOutput * -ThetaNode::add_loopvar(jlm::rvsdg::output * origin) +[[nodiscard]] ThetaNode::LoopVar +ThetaNode::MapOutputLoopVar(const rvsdg::output & output) const { - Node::add_input(std::make_unique(this, origin, origin->Type())); - Node::add_output(std::make_unique(this, origin->Type())); - - auto input = ThetaNode::input(ninputs() - 1); - auto output = ThetaNode::output(noutputs() - 1); - input->output_ = output; - output->input_ = input; + JLM_ASSERT(rvsdg::TryGetOwnerNode(output) == this); + auto peer = MapOutputToInputIndex(output.index()); + return LoopVar{ peer ? input(*peer) : nullptr, + peer ? subregion()->argument(*peer) : nullptr, + subregion()->result(output.index() + 1), + const_cast(&output) }; +} - auto & thetaArgument = ThetaArgument::Create(*subregion(), *input); - ThetaResult::Create(thetaArgument, *output); - return output; +[[nodiscard]] std::vector +ThetaNode::GetLoopVars() const +{ + std::vector loopvars; + for (size_t input_index = 0; input_index < ninputs(); ++input_index) + { + // Check if there is a matching input/output -- if we are in + // the process of deleting a loop variable, inputs and outputs + // might be unmatched. 
+ auto output_index = MapInputToOutputIndex(input_index); + if (output_index) + { + loopvars.push_back(LoopVar{ input(input_index), + subregion()->argument(input_index), + subregion()->result(*output_index + 1), + output(*output_index) }); + } + } + return loopvars; } -ThetaNode * -ThetaNode::copy(rvsdg::Region * region, rvsdg::SubstitutionMap & smap) const +std::optional +ThetaNode::MapInputToOutputIndex(std::size_t index) const noexcept { - auto nf = graph()->GetNodeNormalForm(typeid(Operation)); - nf->set_mutable(false); + std::size_t offset = 0; + for (std::size_t unmatched : unmatchedInputs) + { + if (unmatched == index) + { + return std::nullopt; + } + if (unmatched < index) + { + ++offset; + } + } - rvsdg::SubstitutionMap rmap; - auto theta = create(region); + index -= offset; + offset = 0; + for (std::size_t unmatched : unmatchedOutputs) + { + if (unmatched <= index) + { + ++offset; + } + } + return index + offset; +} - /* add loop variables */ - for (auto olv : *this) +std::optional +ThetaNode::MapOutputToInputIndex(std::size_t index) const noexcept +{ + std::size_t offset = 0; + for (std::size_t unmatched : unmatchedOutputs) { - auto nlv = theta->add_loopvar(smap.lookup(olv->input()->origin())); - rmap.insert(olv->argument(), nlv->argument()); + if (unmatched == index) + { + return std::nullopt; + } + if (unmatched < index) + { + ++offset; + } } - /* copy subregion */ - subregion()->copy(theta->subregion(), rmap, false, false); - theta->set_predicate(rmap.lookup(predicate()->origin())); + index -= offset; + offset = 0; + for (std::size_t unmatched : unmatchedInputs) + { + if (unmatched <= index) + { + ++offset; + } + } + return index + offset; +} - /* redirect loop variables */ - for (auto olv = begin(), nlv = theta->begin(); olv != end(); olv++, nlv++) +void +ThetaNode::MarkInputIndexErased(std::size_t index) noexcept +{ + if (auto peer = MapInputToOutputIndex(index)) + { + unmatchedOutputs.push_back(*peer); + } + else + { + auto i = 
std::remove(unmatchedInputs.begin(), unmatchedInputs.end(), index); + unmatchedInputs.erase(i, unmatchedInputs.end()); + } + for (auto & unmatched : unmatchedInputs) { - (*nlv)->result()->divert_to(rmap.lookup((*olv)->result()->origin())); - smap.insert(olv.output(), nlv.output()); + if (unmatched > index) + { + unmatched -= 1; + } } +} - nf->set_mutable(true); - return theta; +void +ThetaNode::MarkOutputIndexErased(std::size_t index) noexcept +{ + if (auto peer = MapOutputToInputIndex(index)) + { + unmatchedInputs.push_back(*peer); + } + else + { + auto i = std::remove(unmatchedOutputs.begin(), unmatchedOutputs.end(), index); + unmatchedOutputs.erase(i, unmatchedOutputs.end()); + } + for (auto & unmatched : unmatchedOutputs) + { + if (unmatched > index) + { + unmatched -= 1; + } + } } } diff --git a/jlm/rvsdg/theta.hpp b/jlm/rvsdg/theta.hpp index 4265e052f..a6ac7f6ee 100644 --- a/jlm/rvsdg/theta.hpp +++ b/jlm/rvsdg/theta.hpp @@ -7,6 +7,8 @@ #ifndef JLM_RVSDG_THETA_HPP #define JLM_RVSDG_THETA_HPP +#include + #include #include #include @@ -29,68 +31,45 @@ class ThetaOperation final : public StructuralOperation class ThetaInput; class ThetaOutput; +class ThetaResult; class ThetaNode final : public StructuralNode { public: - class loopvar_iterator - { - public: - constexpr loopvar_iterator(ThetaOutput * output) noexcept - : output_(output) - {} - - const loopvar_iterator & - operator++() noexcept; - - inline const loopvar_iterator - operator++(int) noexcept - { - loopvar_iterator it(*this); - ++(*this); - return it; - } - - inline bool - operator==(const loopvar_iterator & other) const noexcept - { - return output_ == other.output_; - } - - inline bool - operator!=(const loopvar_iterator & other) const noexcept - { - return !(*this == other); - } - - ThetaOutput * - operator*() noexcept - { - return output_; - } - - ThetaOutput ** - operator->() noexcept - { - return &output_; - } - - ThetaOutput * - output() const noexcept - { - return output_; - } - - private: - 
ThetaOutput * output_; - }; - ~ThetaNode() noexcept override; private: explicit ThetaNode(rvsdg::Region & parent); public: + /** + * \brief Description of a loop-carried variable. + * + * A loop-carried variable from the POV of a theta node has + * multiple representations (entry, pre-iteration, + * post-iteration, exit). This structure bundles + * all representations of a single loop-carried variable. + */ + struct LoopVar + { + /** + * \brief Variable at loop entry (input to theta). + */ + rvsdg::input * input; + /** + * \brief Variable before iteration (input argument to subregion). + */ + rvsdg::output * pre; + /** + * \brief Variable after iteration (output result from subregion). + */ + rvsdg::input * post; + /** + * \brief Variable at loop exit (output of theta). + */ + rvsdg::output * output; + }; + static ThetaNode * create(rvsdg::Region * parent) { @@ -121,28 +100,6 @@ class ThetaNode final : public StructuralNode remove(node); } - inline size_t - nloopvars() const noexcept - { - JLM_ASSERT(ninputs() == noutputs()); - return ninputs(); - } - - inline ThetaNode::loopvar_iterator - begin() const - { - if (ninputs() == 0) - return loopvar_iterator(nullptr); - - return loopvar_iterator(output(0)); - } - - inline ThetaNode::loopvar_iterator - end() const - { - return loopvar_iterator(nullptr); - } - /** * Remove theta outputs and their respective results. 
* @@ -161,7 +118,7 @@ class ThetaNode final : public StructuralNode * \see ThetaOutput#IsDead() */ template - util::HashSet + util::HashSet RemoveThetaOutputsWhere(const F & match); /** @@ -177,10 +134,10 @@ class ThetaNode final : public StructuralNode * \see RemoveThetaOutputsWhere() * \see ThetaOutput#IsDead() */ - util::HashSet + util::HashSet PruneThetaOutputs() { - auto match = [](const ThetaOutput &) + auto match = [](const rvsdg::output &) { return true; }; @@ -194,7 +151,7 @@ class ThetaNode final : public StructuralNode * An input must match the condition specified by \p match and its respective argument must be * dead. * - * @tparam F A type that supports the function call operator: bool operator(const ThetaInput&) + * @tparam F A type that supports the function call operator: bool operator(const jlm::input&) * @param match Defines the condition of the elements to remove. * @return The outputs corresponding to the removed outputs. * @@ -207,7 +164,7 @@ class ThetaNode final : public StructuralNode * \see RegionArgument#IsDead() */ template - util::HashSet + util::HashSet RemoveThetaInputsWhere(const F & match); /** @@ -223,10 +180,10 @@ class ThetaNode final : public StructuralNode * \see RemoveThetaInputsWhere() * \see RegionArgument#IsDead() */ - util::HashSet + util::HashSet PruneThetaInputs() { - auto match = [](const ThetaInput &) + auto match = [](const rvsdg::input &) { return true; }; @@ -234,230 +191,146 @@ class ThetaNode final : public StructuralNode return RemoveThetaInputsWhere(match); } - ThetaInput * - input(size_t index) const noexcept; - - ThetaOutput * - output(size_t index) const noexcept; - - ThetaOutput * - add_loopvar(jlm::rvsdg::output * origin); + LoopVar + AddLoopVar(rvsdg::output * origin); virtual ThetaNode * copy(rvsdg::Region * region, rvsdg::SubstitutionMap & smap) const override; -}; - -class ThetaInput final : public StructuralInput -{ - friend ThetaNode; - friend ThetaOutput; - -public: - ~ThetaInput() noexcept override; 
- - ThetaInput(ThetaNode * node, jlm::rvsdg::output * origin, std::shared_ptr type) - : StructuralInput(node, origin, std::move(type)), - output_(nullptr) - {} - - ThetaNode * - node() const noexcept - { - return static_cast(StructuralInput::node()); - } - - ThetaOutput * - output() const noexcept - { - return output_; - } - - inline RegionArgument * - argument() const noexcept - { - JLM_ASSERT(arguments.size() == 1); - return arguments.first(); - } - - [[nodiscard]] inline RegionResult * - result() const noexcept; - -private: - ThetaOutput * output_; -}; - -static inline bool -is_invariant(const ThetaInput * input) noexcept -{ - return input->result()->origin() == input->argument(); -} - -class ThetaOutput final : public StructuralOutput -{ - friend ThetaNode; - friend ThetaInput; - -public: - ~ThetaOutput() noexcept override; - - ThetaOutput(ThetaNode * node, const std::shared_ptr type) - : StructuralOutput(node, std::move(type)), - input_(nullptr) - {} - - ThetaNode * - node() const noexcept - { - return static_cast(StructuralOutput::node()); - } - - [[nodiscard]] ThetaInput * - input() const noexcept - { - return input_; - } - - inline RegionArgument * - argument() const noexcept - { - return input_->argument(); - } - [[nodiscard]] RegionResult * - result() const noexcept - { - JLM_ASSERT(results.size() == 1); - return results.first(); - } - -private: - ThetaInput * input_; -}; - -/** - * Represents a region argument in a theta subregion. 
- */ -class ThetaArgument final : public RegionArgument -{ - friend ThetaNode; - -public: - ~ThetaArgument() noexcept override; - - ThetaArgument & - Copy(rvsdg::Region & region, StructuralInput * input) override; - -private: - ThetaArgument(rvsdg::Region & region, ThetaInput & input) - : RegionArgument(&region, &input, input.Type()) - { - JLM_ASSERT(is(region.node())); - } - - static ThetaArgument & - Create(rvsdg::Region & region, ThetaInput & input) - { - auto thetaArgument = new ThetaArgument(region, input); - region.append_argument(thetaArgument); - return *thetaArgument; - } -}; - -/** - * Represents a region result in a theta subregion. - */ -class ThetaResult final : public RegionResult -{ - friend ThetaNode; - -public: - ~ThetaResult() noexcept override; - - ThetaResult & - Copy(rvsdg::output & origin, StructuralOutput * output) override; - -private: - ThetaResult(rvsdg::output & origin, ThetaOutput & thetaOutput) - : RegionResult(origin.region(), &origin, &thetaOutput, origin.Type()) - { - JLM_ASSERT(is(origin.region()->node())); - } + /** + * \brief Maps variable at entry to full variable description. + * + * \param input + * Input to the theta node. + * + * \returns + * The loop variable description. + * + * \pre + * \p input must be an input to this node. + * + * Returns the full description of the loop variable corresponding + * to this entry into the theta node. + */ + [[nodiscard]] LoopVar + MapInputLoopVar(const rvsdg::input & input) const; - static ThetaResult & - Create(rvsdg::output & origin, ThetaOutput & thetaOutput) - { - auto thetaResult = new ThetaResult(origin, thetaOutput); - origin.region()->append_result(thetaResult); - return *thetaResult; - } -}; + /** + * \brief Maps variable at start of loop iteration to full variable description. + * + * \param argument + * Argument of theta region. + * + * \returns + * The loop variable description. + * + * \pre + * \p argument must be an argument to the subregion of this node. 
+ * + * Returns the full description of the loop variable corresponding + * to this variable at the start of each loop iteration. + */ + [[nodiscard]] LoopVar + MapPreLoopVar(const rvsdg::output & argument) const; -/** - * Represents the predicate result of a theta subregion. - */ -class ThetaPredicateResult final : public RegionResult -{ - friend ThetaNode; + /** + * \brief Maps variable at end of loop iteration to full variable description. + * + * \param result + * Result of theta region. + * + * \returns + * The loop variable description. + * + * \pre + * \p result must be a result to the subregion of this node. + * + * Returns the full description of the loop variable corresponding + * to this variable at the end of each loop iteration. + */ + [[nodiscard]] LoopVar + MapPostLoopVar(const rvsdg::input & result) const; -public: - ~ThetaPredicateResult() noexcept override; + /** + * \brief Maps variable at exit to full variable description. + * + * \param output + * Output of this theta node + * + * \returns + * The loop variable description + * + * \pre + * \p output must be an output of this node + * + * Returns the full description of the loop variable corresponding + * to this loop exit value. + */ + [[nodiscard]] LoopVar + MapOutputLoopVar(const rvsdg::output & output) const; - ThetaPredicateResult & - Copy(rvsdg::output & origin, StructuralOutput * output) override; + /** + * \brief Returns all loop variables. + * + * \returns + * List of loop variable descriptions. 
+ */ + [[nodiscard]] std::vector + GetLoopVars() const; private: - explicit ThetaPredicateResult(rvsdg::output & origin) - : RegionResult(origin.region(), &origin, nullptr, ControlType::Create(2)) - { - JLM_ASSERT(is(origin.region()->node())); - } - - static ThetaPredicateResult & - Create(rvsdg::output & origin) - { - auto thetaResult = new ThetaPredicateResult(origin); - origin.region()->append_result(thetaResult); - return *thetaResult; - } + // Calling RemoveThetaInputsWhere/RemoveThetaOutputsWhere can result + // in inputs (and pre-loop arguments) and outputs (and post-loop results) + // to become unmatched. In this case, the theta node itself has + // "invalid" shape until fixed properly. + // The indices of unmatched inputs/outputs are tracked here to + // detect this situation, and also to provide correct mapping. + // Computing the mapping is a bit fiddly as it requires adjusting + // indices accordingly, should seriously consider whether this + // is really necessary or things can rather be reformulated such that + // inputs/outputs are always consistent. 
+ + std::optional + MapInputToOutputIndex(std::size_t index) const noexcept; + + std::optional + MapOutputToInputIndex(std::size_t index) const noexcept; + + void + MarkInputIndexErased(std::size_t index) noexcept; + + void + MarkOutputIndexErased(std::size_t index) noexcept; + + std::vector unmatchedInputs; + std::vector unmatchedOutputs; }; static inline bool -is_invariant(const ThetaOutput * output) noexcept +ThetaLoopVarIsInvariant(const ThetaNode::LoopVar & loopVar) noexcept { - return output->result()->origin() == output->argument(); + return loopVar.post->origin() == loopVar.pre; } /* theta node method definitions */ -inline ThetaInput * -ThetaNode::input(size_t index) const noexcept -{ - return static_cast(Node::input(index)); -} - -inline ThetaOutput * -ThetaNode::output(size_t index) const noexcept -{ - return static_cast(Node::output(index)); -} - template -util::HashSet +util::HashSet ThetaNode::RemoveThetaOutputsWhere(const F & match) { - util::HashSet deadInputs; + util::HashSet deadInputs; + auto loopvars = GetLoopVars(); // iterate backwards to avoid the invalidation of 'n' by RemoveOutput() - for (size_t n = noutputs() - 1; n != static_cast(-1); n--) + for (size_t n = loopvars.size(); n > 0; --n) { - auto & thetaOutput = *output(n); - auto & thetaResult = *thetaOutput.result(); - - if (thetaOutput.IsDead() && match(thetaOutput)) + auto & loopvar = loopvars[n - 1]; + if (loopvar.output->IsDead() && match(*loopvar.output)) { - deadInputs.Insert(thetaOutput.input()); - subregion()->RemoveResult(thetaResult.index()); - RemoveOutput(thetaOutput.index()); + deadInputs.Insert(loopvar.input); + subregion()->RemoveResult(loopvar.post->index()); + MarkOutputIndexErased(loopvar.output->index()); + RemoveOutput(loopvar.output->index()); } } @@ -465,21 +338,22 @@ ThetaNode::RemoveThetaOutputsWhere(const F & match) } template -util::HashSet +util::HashSet ThetaNode::RemoveThetaInputsWhere(const F & match) { - util::HashSet deadOutputs; + util::HashSet 
deadOutputs; // iterate backwards to avoid the invalidation of 'n' by RemoveInput() for (size_t n = ninputs() - 1; n != static_cast(-1); n--) { auto & thetaInput = *input(n); - auto & thetaArgument = *thetaInput.argument(); + auto loopvar = MapInputLoopVar(thetaInput); - if (thetaArgument.IsDead() && match(thetaInput)) + if (loopvar.pre->IsDead() && match(thetaInput)) { - deadOutputs.Insert(thetaInput.output()); - subregion()->RemoveArgument(thetaArgument.index()); + deadOutputs.Insert(loopvar.output); + subregion()->RemoveArgument(loopvar.pre->index()); + MarkInputIndexErased(thetaInput.index()); RemoveInput(thetaInput.index()); } } @@ -487,14 +361,6 @@ ThetaNode::RemoveThetaInputsWhere(const F & match) return deadOutputs; } -/* theta input method definitions */ - -[[nodiscard]] inline RegionResult * -ThetaInput::result() const noexcept -{ - return output_->result(); -} - } #endif diff --git a/tests/TestRvsdgs.cpp b/tests/TestRvsdgs.cpp index 7a94a119d..2e2e7ca81 100644 --- a/tests/TestRvsdgs.cpp +++ b/tests/TestRvsdgs.cpp @@ -1622,29 +1622,29 @@ ThetaTest::SetupRvsdg() auto thetanode = jlm::rvsdg::ThetaNode::create(fct->subregion()); - auto n = thetanode->add_loopvar(zero); - auto l = thetanode->add_loopvar(fct->GetFunctionArguments()[0]); - auto a = thetanode->add_loopvar(fct->GetFunctionArguments()[1]); - auto c = thetanode->add_loopvar(fct->GetFunctionArguments()[2]); - auto s = thetanode->add_loopvar(fct->GetFunctionArguments()[3]); + auto n = thetanode->AddLoopVar(zero); + auto l = thetanode->AddLoopVar(fct->GetFunctionArguments()[0]); + auto a = thetanode->AddLoopVar(fct->GetFunctionArguments()[1]); + auto c = thetanode->AddLoopVar(fct->GetFunctionArguments()[2]); + auto s = thetanode->AddLoopVar(fct->GetFunctionArguments()[3]); auto gepnode = GetElementPtrOperation::Create( - a->argument(), - { n->argument() }, + a.pre, + { n.pre }, jlm::rvsdg::bittype::Create(32), pointerType); - auto store = StoreNonVolatileNode::Create(gepnode, c->argument(), { 
s->argument() }, 4); + auto store = StoreNonVolatileNode::Create(gepnode, c.pre, { s.pre }, 4); auto one = jlm::rvsdg::create_bitconstant(thetanode->subregion(), 32, 1); - auto sum = jlm::rvsdg::bitadd_op::create(32, n->argument(), one); - auto cmp = jlm::rvsdg::bitult_op::create(32, sum, l->argument()); + auto sum = jlm::rvsdg::bitadd_op::create(32, n.pre, one); + auto cmp = jlm::rvsdg::bitult_op::create(32, sum, l.pre); auto predicate = jlm::rvsdg::match(1, { { 1, 1 } }, 0, 2, cmp); - n->result()->divert_to(sum); - s->result()->divert_to(store[0]); + n.post->divert_to(sum); + s.post->divert_to(store[0]); thetanode->set_predicate(predicate); - fct->finalize({ s }); + fct->finalize({ s.output }); GraphExport::Create(*fct->output(), "f"); /* diff --git a/tests/jlm/hls/backend/rvsdg2rhls/DeadNodeEliminationTests.cpp b/tests/jlm/hls/backend/rvsdg2rhls/DeadNodeEliminationTests.cpp index 67cdcb9f8..ffb18ee61 100644 --- a/tests/jlm/hls/backend/rvsdg2rhls/DeadNodeEliminationTests.cpp +++ b/tests/jlm/hls/backend/rvsdg2rhls/DeadNodeEliminationTests.cpp @@ -66,8 +66,8 @@ TestDeadLoopNodeOutput() auto loopNode = loop_node::create(lambdaNode->subregion()); jlm::rvsdg::output * buffer; - auto output0 = loopNode->add_loopvar(p, &buffer); - loopNode->add_loopvar(x); + auto output0 = loopNode->AddLoopVar(p, &buffer); + loopNode->AddLoopVar(x); loopNode->set_predicate(buffer); auto lambdaOutput = lambdaNode->finalize({ output0 }); diff --git a/tests/jlm/hls/backend/rvsdg2rhls/MemoryConverterTests.cpp b/tests/jlm/hls/backend/rvsdg2rhls/MemoryConverterTests.cpp index 24bc435bb..33cb51fe4 100644 --- a/tests/jlm/hls/backend/rvsdg2rhls/MemoryConverterTests.cpp +++ b/tests/jlm/hls/backend/rvsdg2rhls/MemoryConverterTests.cpp @@ -274,33 +274,29 @@ TestThetaLoad() auto theta = jlm::rvsdg::ThetaNode::create(lambda->subregion()); auto thetaRegion = theta->subregion(); // Predicate - auto idv = theta->add_loopvar(lambda->GetFunctionArguments()[0]); - auto lvs = 
theta->add_loopvar(lambda->GetFunctionArguments()[1]); - auto lve = theta->add_loopvar(lambda->GetFunctionArguments()[2]); + auto idv = theta->AddLoopVar(lambda->GetFunctionArguments()[0]); + auto lvs = theta->AddLoopVar(lambda->GetFunctionArguments()[1]); + auto lve = theta->AddLoopVar(lambda->GetFunctionArguments()[2]); jlm::rvsdg::bitult_op ult(32); jlm::rvsdg::bitsgt_op sgt(32); jlm::rvsdg::bitadd_op add(32); jlm::rvsdg::bitsub_op sub(32); - auto arm = jlm::rvsdg::SimpleNode::create_normalized( - thetaRegion, - add, - { idv->argument(), lvs->argument() })[0]; - auto cmp = - jlm::rvsdg::SimpleNode::create_normalized(thetaRegion, ult, { arm, lve->argument() })[0]; + auto arm = jlm::rvsdg::SimpleNode::create_normalized(thetaRegion, add, { idv.pre, lvs.pre })[0]; + auto cmp = jlm::rvsdg::SimpleNode::create_normalized(thetaRegion, ult, { arm, lve.pre })[0]; auto match = jlm::rvsdg::match(1, { { 1, 1 } }, 0, 2, cmp); - idv->result()->divert_to(arm); + idv.post->divert_to(arm); theta->set_predicate(match); // Load node - auto loadAddress = theta->add_loopvar(lambda->GetFunctionArguments()[3]); - auto memoryStateArgument = theta->add_loopvar(lambda->GetFunctionArguments()[4]); + auto loadAddress = theta->AddLoopVar(lambda->GetFunctionArguments()[3]); + auto memoryStateArgument = theta->AddLoopVar(lambda->GetFunctionArguments()[4]); auto loadOutput = LoadNonVolatileNode::Create( - loadAddress->argument(), - { memoryStateArgument->argument() }, + loadAddress.pre, + { memoryStateArgument.pre }, PointerType::Create(), 32); - loadAddress->result()->divert_to(loadOutput[0]); - memoryStateArgument->result()->divert_to(loadOutput[1]); + loadAddress.post->divert_to(loadOutput[0]); + memoryStateArgument.post->divert_to(loadOutput[1]); auto lambdaOutput = lambda->finalize({ theta->output(3), theta->output(4) }); GraphExport::Create(*lambdaOutput, "f"); diff --git a/tests/jlm/hls/backend/rvsdg2rhls/MemoryQueueTests.cpp b/tests/jlm/hls/backend/rvsdg2rhls/MemoryQueueTests.cpp index 
6a522ea7d..5fbec9e50 100644 --- a/tests/jlm/hls/backend/rvsdg2rhls/MemoryQueueTests.cpp +++ b/tests/jlm/hls/backend/rvsdg2rhls/MemoryQueueTests.cpp @@ -44,15 +44,15 @@ TestSingleLoad() // Load node auto functionArguments = lambda->GetFunctionArguments(); - auto loadAddress = theta->add_loopvar(functionArguments[0]); - auto memoryStateArgument = theta->add_loopvar(functionArguments[1]); + auto loadAddress = theta->AddLoopVar(functionArguments[0]); + auto memoryStateArgument = theta->AddLoopVar(functionArguments[1]); auto loadOutput = LoadNonVolatileNode::Create( - loadAddress->argument(), - { memoryStateArgument->argument() }, + loadAddress.pre, + { memoryStateArgument.pre }, PointerType::Create(), 32); - loadAddress->result()->divert_to(loadOutput[0]); - memoryStateArgument->result()->divert_to(loadOutput[1]); + loadAddress.post->divert_to(loadOutput[0]); + memoryStateArgument.post->divert_to(loadOutput[1]); auto lambdaOutput = lambda->finalize({ theta->output(0), theta->output(1) }); GraphExport::Create(*lambdaOutput, "f"); @@ -122,22 +122,22 @@ TestLoadStore() // Load node auto functionArguments = lambda->GetFunctionArguments(); - auto loadAddress = theta->add_loopvar(functionArguments[0]); - auto storeAddress = theta->add_loopvar(functionArguments[1]); - auto memoryStateArgument = theta->add_loopvar(functionArguments[2]); + auto loadAddress = theta->AddLoopVar(functionArguments[0]); + auto storeAddress = theta->AddLoopVar(functionArguments[1]); + auto memoryStateArgument = theta->AddLoopVar(functionArguments[2]); auto loadOutput = LoadNonVolatileNode::Create( - loadAddress->argument(), - { memoryStateArgument->argument() }, + loadAddress.pre, + { memoryStateArgument.pre }, PointerType::Create(), 32); auto storeOutput = StoreNonVolatileNode::Create( - storeAddress->argument(), + storeAddress.pre, jlm::rvsdg::create_bitconstant(theta->subregion(), 32, 1), { loadOutput[1] }, 32); - loadAddress->result()->divert_to(loadOutput[0]); - 
memoryStateArgument->result()->divert_to(storeOutput[0]); + loadAddress.post->divert_to(loadOutput[0]); + memoryStateArgument.post->divert_to(storeOutput[0]); auto lambdaOutput = lambda->finalize({ theta->output(0), theta->output(2) }); GraphExport::Create(*lambdaOutput, "f"); @@ -205,18 +205,18 @@ TestAddrQueue() // Load node auto functionArguments = lambda->GetFunctionArguments(); - auto address = theta->add_loopvar(functionArguments[0]); - auto memoryStateArgument = theta->add_loopvar(functionArguments[1]); + auto address = theta->AddLoopVar(functionArguments[0]); + auto memoryStateArgument = theta->AddLoopVar(functionArguments[1]); auto loadOutput = LoadNonVolatileNode::Create( - address->argument(), - { memoryStateArgument->argument() }, + address.pre, + { memoryStateArgument.pre }, PointerType::Create(), 32); auto storeOutput = - StoreNonVolatileNode::Create(address->argument(), loadOutput[0], { loadOutput[1] }, 32); + StoreNonVolatileNode::Create(address.pre, loadOutput[0], { loadOutput[1] }, 32); - address->result()->divert_to(loadOutput[0]); - memoryStateArgument->result()->divert_to(storeOutput[0]); + address.post->divert_to(loadOutput[0]); + memoryStateArgument.post->divert_to(storeOutput[0]); auto lambdaOutput = lambda->finalize({ theta->output(0), theta->output(1) }); GraphExport::Create(*lambdaOutput, "f"); diff --git a/tests/jlm/hls/backend/rvsdg2rhls/TestFork.cpp b/tests/jlm/hls/backend/rvsdg2rhls/TestFork.cpp index 6c53a3f76..2392fe28b 100644 --- a/tests/jlm/hls/backend/rvsdg2rhls/TestFork.cpp +++ b/tests/jlm/hls/backend/rvsdg2rhls/TestFork.cpp @@ -34,11 +34,11 @@ TestFork() auto loop = hls::loop_node::create(lambda->subregion()); auto subregion = loop->subregion(); rvsdg::output * idvBuffer; - loop->add_loopvar(lambda->GetFunctionArguments()[0], &idvBuffer); + loop->AddLoopVar(lambda->GetFunctionArguments()[0], &idvBuffer); rvsdg::output * lvsBuffer; - loop->add_loopvar(lambda->GetFunctionArguments()[1], &lvsBuffer); + 
loop->AddLoopVar(lambda->GetFunctionArguments()[1], &lvsBuffer); rvsdg::output * lveBuffer; - loop->add_loopvar(lambda->GetFunctionArguments()[2], &lveBuffer); + loop->AddLoopVar(lambda->GetFunctionArguments()[2], &lveBuffer); auto arm = rvsdg::SimpleNode::create_normalized(subregion, add, { idvBuffer, lvsBuffer })[0]; auto cmp = rvsdg::SimpleNode::create_normalized(subregion, ult, { arm, lveBuffer })[0]; @@ -104,7 +104,7 @@ TestConstantFork() auto loop = hls::loop_node::create(lambdaRegion); auto subregion = loop->subregion(); rvsdg::output * idvBuffer; - loop->add_loopvar(lambda->GetFunctionArguments()[0], &idvBuffer); + loop->AddLoopVar(lambda->GetFunctionArguments()[0], &idvBuffer); auto bitConstant1 = rvsdg::create_bitconstant(subregion, 32, 1); auto arm = rvsdg::SimpleNode::create_normalized(subregion, add, { idvBuffer, bitConstant1 })[0]; diff --git a/tests/jlm/hls/backend/rvsdg2rhls/TestTheta.cpp b/tests/jlm/hls/backend/rvsdg2rhls/TestTheta.cpp index 1b1191645..e82f91588 100644 --- a/tests/jlm/hls/backend/rvsdg2rhls/TestTheta.cpp +++ b/tests/jlm/hls/backend/rvsdg2rhls/TestTheta.cpp @@ -34,18 +34,15 @@ TestUnknownBoundaries() auto theta = jlm::rvsdg::ThetaNode::create(lambda->subregion()); auto subregion = theta->subregion(); - auto idv = theta->add_loopvar(lambda->GetFunctionArguments()[0]); - auto lvs = theta->add_loopvar(lambda->GetFunctionArguments()[1]); - auto lve = theta->add_loopvar(lambda->GetFunctionArguments()[2]); - - auto arm = jlm::rvsdg::SimpleNode::create_normalized( - subregion, - add, - { idv->argument(), lvs->argument() })[0]; - auto cmp = jlm::rvsdg::SimpleNode::create_normalized(subregion, ult, { arm, lve->argument() })[0]; + auto idv = theta->AddLoopVar(lambda->GetFunctionArguments()[0]); + auto lvs = theta->AddLoopVar(lambda->GetFunctionArguments()[1]); + auto lve = theta->AddLoopVar(lambda->GetFunctionArguments()[2]); + + auto arm = jlm::rvsdg::SimpleNode::create_normalized(subregion, add, { idv.pre, lvs.pre })[0]; + auto cmp = 
jlm::rvsdg::SimpleNode::create_normalized(subregion, ult, { arm, lve.pre })[0]; auto match = jlm::rvsdg::match(1, { { 1, 1 } }, 0, 2, cmp); - idv->result()->divert_to(arm); + idv.post->divert_to(arm); theta->set_predicate(match); auto f = lambda->finalize({ theta->output(0), theta->output(1), theta->output(2) }); diff --git a/tests/jlm/hls/backend/rvsdg2rhls/UnusedStateRemovalTests.cpp b/tests/jlm/hls/backend/rvsdg2rhls/UnusedStateRemovalTests.cpp index 8ca61d7ba..8320c0d4c 100644 --- a/tests/jlm/hls/backend/rvsdg2rhls/UnusedStateRemovalTests.cpp +++ b/tests/jlm/hls/backend/rvsdg2rhls/UnusedStateRemovalTests.cpp @@ -90,20 +90,21 @@ TestTheta() auto thetaNode = jlm::rvsdg::ThetaNode::create(&rvsdg.GetRootRegion()); - auto thetaOutput0 = thetaNode->add_loopvar(p); - auto thetaOutput1 = thetaNode->add_loopvar(x); - auto thetaOutput2 = thetaNode->add_loopvar(y); - auto thetaOutput3 = thetaNode->add_loopvar(z); - - thetaOutput2->result()->divert_to(thetaOutput3->argument()); - thetaOutput3->result()->divert_to(thetaOutput2->argument()); - thetaNode->set_predicate(thetaOutput0->argument()); - - auto result = jlm::tests::SimpleNode::Create( - rvsdg.GetRootRegion(), - { thetaOutput0, thetaOutput1, thetaOutput2, thetaOutput3 }, - { valueType }) - .output(0); + auto thetaOutput0 = thetaNode->AddLoopVar(p); + auto thetaOutput1 = thetaNode->AddLoopVar(x); + auto thetaOutput2 = thetaNode->AddLoopVar(y); + auto thetaOutput3 = thetaNode->AddLoopVar(z); + + thetaOutput2.post->divert_to(thetaOutput3.pre); + thetaOutput3.post->divert_to(thetaOutput2.pre); + thetaNode->set_predicate(thetaOutput0.pre); + + auto result = + jlm::tests::SimpleNode::Create( + rvsdg.GetRootRegion(), + { thetaOutput0.output, thetaOutput1.output, thetaOutput2.output, thetaOutput3.output }, + { valueType }) + .output(0); GraphExport::Create(*result, "f"); diff --git a/tests/jlm/hls/backend/rvsdg2rhls/test-loop-passthrough.cpp b/tests/jlm/hls/backend/rvsdg2rhls/test-loop-passthrough.cpp index 
d3b2011f4..c81269da1 100644 --- a/tests/jlm/hls/backend/rvsdg2rhls/test-loop-passthrough.cpp +++ b/tests/jlm/hls/backend/rvsdg2rhls/test-loop-passthrough.cpp @@ -52,7 +52,7 @@ test() auto loop = hls::loop_node::create(lambda->subregion()); - auto loop_out = loop->add_loopvar(lambda->GetFunctionArguments()[1]); + auto loop_out = loop->AddLoopVar(lambda->GetFunctionArguments()[1]); auto f = lambda->finalize({ loop_out }); jlm::llvm::GraphExport::Create(*f, ""); diff --git a/tests/jlm/llvm/ir/operators/TestCall.cpp b/tests/jlm/llvm/ir/operators/TestCall.cpp index 8308196e0..1042741fc 100644 --- a/tests/jlm/llvm/ir/operators/TestCall.cpp +++ b/tests/jlm/llvm/ir/operators/TestCall.cpp @@ -200,18 +200,18 @@ TestCallTypeClassifierNonRecursiveDirectCall() auto SetupOuterTheta = [](jlm::rvsdg::Region * region, jlm::rvsdg::output * functionG) { auto outerTheta = jlm::rvsdg::ThetaNode::create(region); - auto otf = outerTheta->add_loopvar(functionG); + auto otf = outerTheta->AddLoopVar(functionG); auto innerTheta = jlm::rvsdg::ThetaNode::create(outerTheta->subregion()); - auto itf = innerTheta->add_loopvar(otf->argument()); + auto itf = innerTheta->AddLoopVar(otf.pre); auto predicate = jlm::rvsdg::control_false(innerTheta->subregion()); auto gamma = jlm::rvsdg::GammaNode::create(predicate, 2); - auto ev = gamma->AddEntryVar(itf->argument()); + auto ev = gamma->AddEntryVar(itf.pre); auto xv = gamma->AddExitVar(ev.branchArgument); - itf->result()->divert_to(xv.output); - otf->result()->divert_to(itf); + itf.post->divert_to(xv.output); + otf.post->divert_to(itf.output); return otf; }; @@ -233,7 +233,7 @@ TestCallTypeClassifierNonRecursiveDirectCall() auto functionG = SetupOuterTheta(lambda->subregion(), functionGArgument); auto callResults = - CallNode::Create(functionG, functionTypeG, { iOStateArgument, memoryStateArgument }); + CallNode::Create(functionG.output, functionTypeG, { iOStateArgument, memoryStateArgument }); lambda->finalize(callResults); @@ -300,31 +300,31 @@ 
TestCallTypeClassifierNonRecursiveDirectCallTheta() jlm::rvsdg::output * iOState, jlm::rvsdg::output * memoryState) { - auto SetupInnerTheta = [&](jlm::rvsdg::Region * region, jlm::rvsdg::RegionArgument * g) + auto SetupInnerTheta = [&](jlm::rvsdg::Region * region, jlm::rvsdg::output * g) { auto innerTheta = jlm::rvsdg::ThetaNode::create(region); - auto thetaOutputG = innerTheta->add_loopvar(g); + auto thetaOutputG = innerTheta->AddLoopVar(g); return thetaOutputG; }; auto outerTheta = jlm::rvsdg::ThetaNode::create(region); - auto thetaOutputG = outerTheta->add_loopvar(g); - auto thetaOutputValue = outerTheta->add_loopvar(value); - auto thetaOutputIoState = outerTheta->add_loopvar(iOState); - auto thetaOutputMemoryState = outerTheta->add_loopvar(memoryState); + auto thetaOutputG = outerTheta->AddLoopVar(g); + auto thetaOutputValue = outerTheta->AddLoopVar(value); + auto thetaOutputIoState = outerTheta->AddLoopVar(iOState); + auto thetaOutputMemoryState = outerTheta->AddLoopVar(memoryState); - auto functionG = SetupInnerTheta(outerTheta->subregion(), thetaOutputG->argument()); + auto functionG = SetupInnerTheta(outerTheta->subregion(), thetaOutputG.pre); auto callResults = CallNode::Create( - functionG, + functionG.output, functionTypeG, - { thetaOutputIoState->argument(), thetaOutputMemoryState->argument() }); + { thetaOutputIoState.pre, thetaOutputMemoryState.pre }); - thetaOutputG->result()->divert_to(functionG); - thetaOutputValue->result()->divert_to(callResults[0]); - thetaOutputIoState->result()->divert_to(callResults[1]); - thetaOutputMemoryState->result()->divert_to(callResults[2]); + thetaOutputG.post->divert_to(functionG.output); + thetaOutputValue.post->divert_to(callResults[0]); + thetaOutputIoState.post->divert_to(callResults[1]); + thetaOutputMemoryState.post->divert_to(callResults[2]); return std::make_tuple( thetaOutputValue, @@ -356,7 +356,7 @@ TestCallTypeClassifierNonRecursiveDirectCallTheta() iOStateArgument, memoryStateArgument); - auto 
lambdaOutput = lambda->finalize({ loopValue, iOState, memoryState }); + auto lambdaOutput = lambda->finalize({ loopValue.output, iOState.output, memoryState.output }); return std::make_tuple(lambdaOutput, callNode); }; diff --git a/tests/jlm/llvm/opt/InvariantValueRedirectionTests.cpp b/tests/jlm/llvm/opt/InvariantValueRedirectionTests.cpp index a58238d9f..d3c0b587c 100644 --- a/tests/jlm/llvm/opt/InvariantValueRedirectionTests.cpp +++ b/tests/jlm/llvm/opt/InvariantValueRedirectionTests.cpp @@ -104,20 +104,21 @@ TestTheta() auto l = lambdaNode->GetFunctionArguments()[2]; auto thetaNode1 = jlm::rvsdg::ThetaNode::create(lambdaNode->subregion()); - auto thetaOutput1 = thetaNode1->add_loopvar(c); - auto thetaOutput2 = thetaNode1->add_loopvar(x); - auto thetaOutput3 = thetaNode1->add_loopvar(l); + auto thetaVar1 = thetaNode1->AddLoopVar(c); + auto thetaVar2 = thetaNode1->AddLoopVar(x); + auto thetaVar3 = thetaNode1->AddLoopVar(l); auto thetaNode2 = jlm::rvsdg::ThetaNode::create(thetaNode1->subregion()); - auto thetaOutput4 = thetaNode2->add_loopvar(thetaOutput1->argument()); - thetaNode2->add_loopvar(thetaOutput2->argument()); - auto thetaOutput5 = thetaNode2->add_loopvar(thetaOutput3->argument()); - thetaNode2->set_predicate(thetaOutput4->argument()); + auto thetaVar4 = thetaNode2->AddLoopVar(thetaVar1.pre); + thetaNode2->AddLoopVar(thetaVar2.pre); + auto thetaVar5 = thetaNode2->AddLoopVar(thetaVar3.pre); + thetaNode2->set_predicate(thetaVar4.pre); - thetaOutput3->result()->divert_to(thetaOutput5); - thetaNode1->set_predicate(thetaOutput1->argument()); + thetaVar3.post->divert_to(thetaVar5.output); + thetaNode1->set_predicate(thetaVar1.pre); - auto lambdaOutput = lambdaNode->finalize({ thetaOutput1, thetaOutput2, thetaOutput3 }); + auto lambdaOutput = + lambdaNode->finalize({ thetaVar1.output, thetaVar2.output, thetaVar3.output }); GraphExport::Create(*lambdaOutput, "test"); @@ -127,7 +128,7 @@ TestTheta() // Assert assert(lambdaNode->GetFunctionResults()[0]->origin() 
== c); assert(lambdaNode->GetFunctionResults()[1]->origin() == x); - assert(lambdaNode->GetFunctionResults()[2]->origin() == thetaOutput3); + assert(lambdaNode->GetFunctionResults()[2]->origin() == thetaVar3.output); return 0; } diff --git a/tests/jlm/llvm/opt/TestDeadNodeElimination.cpp b/tests/jlm/llvm/opt/TestDeadNodeElimination.cpp index 4a4be8c84..dbabb95be 100644 --- a/tests/jlm/llvm/opt/TestDeadNodeElimination.cpp +++ b/tests/jlm/llvm/opt/TestDeadNodeElimination.cpp @@ -130,23 +130,23 @@ TestTheta() auto theta = jlm::rvsdg::ThetaNode::create(&graph.GetRootRegion()); - auto lv1 = theta->add_loopvar(x); - auto lv2 = theta->add_loopvar(y); - auto lv3 = theta->add_loopvar(z); - auto lv4 = theta->add_loopvar(y); + auto lv1 = theta->AddLoopVar(x); + auto lv2 = theta->AddLoopVar(y); + auto lv3 = theta->AddLoopVar(z); + auto lv4 = theta->AddLoopVar(y); - lv1->result()->divert_to(lv2->argument()); - lv2->result()->divert_to(lv1->argument()); + lv1.post->divert_to(lv2.pre); + lv2.post->divert_to(lv1.pre); - auto t = jlm::tests::create_testop(theta->subregion(), { lv3->argument() }, { vt })[0]; - lv3->result()->divert_to(t); - lv4->result()->divert_to(lv2->argument()); + auto t = jlm::tests::create_testop(theta->subregion(), { lv3.pre }, { vt })[0]; + lv3.post->divert_to(t); + lv4.post->divert_to(lv2.pre); auto c = jlm::tests::create_testop(theta->subregion(), {}, { ct })[0]; theta->set_predicate(c); - GraphExport::Create(*theta->output(0), "a"); - GraphExport::Create(*theta->output(3), "b"); + GraphExport::Create(*lv1.output, "a"); + GraphExport::Create(*lv4.output, "b"); // jlm::rvsdg::view(graph.GetRootRegion(), stdout); RunDeadNodeElimination(rm); @@ -173,26 +173,26 @@ TestNestedTheta() auto otheta = jlm::rvsdg::ThetaNode::create(&graph.GetRootRegion()); - auto lvo1 = otheta->add_loopvar(c); - auto lvo2 = otheta->add_loopvar(x); - auto lvo3 = otheta->add_loopvar(y); + auto lvo1 = otheta->AddLoopVar(c); + auto lvo2 = otheta->AddLoopVar(x); + auto lvo3 = 
otheta->AddLoopVar(y); auto itheta = jlm::rvsdg::ThetaNode::create(otheta->subregion()); - auto lvi1 = itheta->add_loopvar(lvo1->argument()); - auto lvi2 = itheta->add_loopvar(lvo2->argument()); - auto lvi3 = itheta->add_loopvar(lvo3->argument()); + auto lvi1 = itheta->AddLoopVar(lvo1.pre); + auto lvi2 = itheta->AddLoopVar(lvo2.pre); + auto lvi3 = itheta->AddLoopVar(lvo3.pre); - lvi2->result()->divert_to(lvi3->argument()); + lvi2.post->divert_to(lvi3.pre); - itheta->set_predicate(lvi1->argument()); + itheta->set_predicate(lvi1.pre); - lvo2->result()->divert_to(itheta->output(1)); - lvo3->result()->divert_to(itheta->output(1)); + lvo2.post->divert_to(lvi2.output); + lvo3.post->divert_to(lvi2.output); - otheta->set_predicate(lvo1->argument()); + otheta->set_predicate(lvo1.pre); - GraphExport::Create(*otheta->output(2), "y"); + GraphExport::Create(*lvo3.output, "y"); // jlm::rvsdg::view(graph, stdout); RunDeadNodeElimination(rm); @@ -219,19 +219,19 @@ TestEvolvingTheta() auto theta = jlm::rvsdg::ThetaNode::create(&graph.GetRootRegion()); - auto lv0 = theta->add_loopvar(c); - auto lv1 = theta->add_loopvar(x1); - auto lv2 = theta->add_loopvar(x2); - auto lv3 = theta->add_loopvar(x3); - auto lv4 = theta->add_loopvar(x4); + auto lv0 = theta->AddLoopVar(c); + auto lv1 = theta->AddLoopVar(x1); + auto lv2 = theta->AddLoopVar(x2); + auto lv3 = theta->AddLoopVar(x3); + auto lv4 = theta->AddLoopVar(x4); - lv1->result()->divert_to(lv2->argument()); - lv2->result()->divert_to(lv3->argument()); - lv3->result()->divert_to(lv4->argument()); + lv1.post->divert_to(lv2.pre); + lv2.post->divert_to(lv3.pre); + lv3.post->divert_to(lv4.pre); - theta->set_predicate(lv0->argument()); + theta->set_predicate(lv0.pre); - GraphExport::Create(*lv1, "x1"); + GraphExport::Create(*lv1.output, "x1"); // jlm::rvsdg::view(graph, stdout); RunDeadNodeElimination(rm); diff --git a/tests/jlm/llvm/opt/alias-analyses/TestAndersen.cpp b/tests/jlm/llvm/opt/alias-analyses/TestAndersen.cpp index 
4237dde43..f3ae6d7a1 100644 --- a/tests/jlm/llvm/opt/alias-analyses/TestAndersen.cpp +++ b/tests/jlm/llvm/opt/alias-analyses/TestAndersen.cpp @@ -595,7 +595,7 @@ TestTheta() auto & gepOutput = ptg->GetRegisterNode(*test.gep->output(0)); - auto & thetaArgument2 = ptg->GetRegisterNode(*test.theta->output(2)->argument()); + auto & thetaArgument2 = ptg->GetRegisterNode(*test.theta->GetLoopVars()[2].pre); auto & thetaOutput2 = ptg->GetRegisterNode(*test.theta->output(2)); assert(TargetsExactly(lambdaArgument1, { &lambda, &ptg->GetExternalMemoryNode() })); diff --git a/tests/jlm/llvm/opt/alias-analyses/TestMemoryStateEncoder.cpp b/tests/jlm/llvm/opt/alias-analyses/TestMemoryStateEncoder.cpp index f55d5e5a6..21bc00d3c 100644 --- a/tests/jlm/llvm/opt/alias-analyses/TestMemoryStateEncoder.cpp +++ b/tests/jlm/llvm/opt/alias-analyses/TestMemoryStateEncoder.cpp @@ -1420,17 +1420,17 @@ ValidateThetaTestSteensgaardAgnostic(const jlm::tests::ThetaTest & test) jlm::rvsdg::output::GetNode(*test.lambda->GetFunctionResults()[0]->origin()); assert(is(*lambda_exit_mux, 2, 1)); - auto thetaOutput = - jlm::util::AssertedCast(lambda_exit_mux->input(0)->origin()); - auto theta = jlm::rvsdg::output::GetNode(*thetaOutput); + auto thetaOutput = lambda_exit_mux->input(0)->origin(); + auto theta = jlm::rvsdg::TryGetOwnerNode(*thetaOutput); assert(theta == test.theta); - auto storeStateOutput = thetaOutput->result()->origin(); + auto loopvar = theta->MapOutputLoopVar(*thetaOutput); + auto storeStateOutput = loopvar.post->origin(); auto store = jlm::rvsdg::output::GetNode(*storeStateOutput); assert(is(*store, 4, 2)); - assert(store->input(storeStateOutput->index() + 2)->origin() == thetaOutput->argument()); + assert(store->input(storeStateOutput->index() + 2)->origin() == loopvar.pre); - auto lambda_entry_mux = jlm::rvsdg::output::GetNode(*thetaOutput->input()->origin()); + auto lambda_entry_mux = jlm::rvsdg::output::GetNode(*loopvar.input->origin()); assert(is(*lambda_entry_mux, 1, 2)); } @@ 
-1445,17 +1445,17 @@ ValidateThetaTestSteensgaardRegionAware(const jlm::tests::ThetaTest & test) jlm::rvsdg::output::GetNode(*test.lambda->GetFunctionResults()[0]->origin()); assert(is(*lambdaExitMerge, 2, 1)); - auto thetaOutput = - jlm::util::AssertedCast(lambdaExitMerge->input(0)->origin()); - auto theta = jlm::rvsdg::output::GetNode(*thetaOutput); + auto thetaOutput = lambdaExitMerge->input(0)->origin(); + auto theta = jlm::rvsdg::TryGetOwnerNode(*thetaOutput); assert(theta == test.theta); + auto loopvar = theta->MapOutputLoopVar(*thetaOutput); - auto storeStateOutput = thetaOutput->result()->origin(); + auto storeStateOutput = loopvar.post->origin(); auto store = jlm::rvsdg::output::GetNode(*storeStateOutput); assert(is(*store, 4, 2)); - assert(store->input(storeStateOutput->index() + 2)->origin() == thetaOutput->argument()); + assert(store->input(storeStateOutput->index() + 2)->origin() == loopvar.pre); - auto lambdaEntrySplit = jlm::rvsdg::output::GetNode(*thetaOutput->input()->origin()); + auto lambdaEntrySplit = jlm::rvsdg::output::GetNode(*loopvar.input->origin()); assert(is(*lambdaEntrySplit, 1, 2)); } @@ -1470,17 +1470,17 @@ ValidateThetaTestSteensgaardAgnosticTopDown(const jlm::tests::ThetaTest & test) jlm::rvsdg::output::GetNode(*test.lambda->GetFunctionResults()[0]->origin()); assert(is(*lambda_exit_mux, 2, 1)); - auto thetaOutput = - jlm::util::AssertedCast(lambda_exit_mux->input(0)->origin()); - auto theta = jlm::rvsdg::output::GetNode(*thetaOutput); + auto thetaOutput = lambda_exit_mux->input(0)->origin(); + auto theta = jlm::rvsdg::TryGetOwnerNode(*thetaOutput); assert(theta == test.theta); + auto loopvar = theta->MapOutputLoopVar(*thetaOutput); - auto storeStateOutput = thetaOutput->result()->origin(); + auto storeStateOutput = loopvar.post->origin(); auto store = jlm::rvsdg::output::GetNode(*storeStateOutput); assert(is(*store, 4, 2)); - assert(store->input(storeStateOutput->index() + 2)->origin() == thetaOutput->argument()); + 
assert(store->input(storeStateOutput->index() + 2)->origin() == loopvar.pre); - auto lambda_entry_mux = jlm::rvsdg::output::GetNode(*thetaOutput->input()->origin()); + auto lambda_entry_mux = jlm::rvsdg::output::GetNode(*loopvar.input->origin()); assert(is(*lambda_entry_mux, 1, 2)); } diff --git a/tests/jlm/llvm/opt/alias-analyses/TestSteensgaard.cpp b/tests/jlm/llvm/opt/alias-analyses/TestSteensgaard.cpp index 06bc14d98..f80b5c8b8 100644 --- a/tests/jlm/llvm/opt/alias-analyses/TestSteensgaard.cpp +++ b/tests/jlm/llvm/opt/alias-analyses/TestSteensgaard.cpp @@ -702,7 +702,7 @@ TestTheta() auto & gepOutput = pointsToGraph.GetRegisterNode(*test.gep->output(0)); - auto & thetaArgument2 = pointsToGraph.GetRegisterNode(*test.theta->output(2)->argument()); + auto & thetaArgument2 = pointsToGraph.GetRegisterNode(*test.theta->GetLoopVars()[2].pre); auto & thetaOutput2 = pointsToGraph.GetRegisterNode(*test.theta->output(2)); assertTargets(lambdaArgument1, { &lambda, &pointsToGraph.GetExternalMemoryNode() }); diff --git a/tests/jlm/llvm/opt/test-cne.cpp b/tests/jlm/llvm/opt/test-cne.cpp index 446307e9b..6df22e575 100644 --- a/tests/jlm/llvm/opt/test-cne.cpp +++ b/tests/jlm/llvm/opt/test-cne.cpp @@ -148,24 +148,24 @@ test_theta() auto theta = jlm::rvsdg::ThetaNode::create(&graph.GetRootRegion()); auto region = theta->subregion(); - auto lv1 = theta->add_loopvar(c); - auto lv2 = theta->add_loopvar(x); - auto lv3 = theta->add_loopvar(x); - auto lv4 = theta->add_loopvar(x); + auto lv1 = theta->AddLoopVar(c); + auto lv2 = theta->AddLoopVar(x); + auto lv3 = theta->AddLoopVar(x); + auto lv4 = theta->AddLoopVar(x); - auto u1 = jlm::tests::create_testop(region, { lv2->argument() }, { vt })[0]; - auto u2 = jlm::tests::create_testop(region, { lv3->argument() }, { vt })[0]; - auto b1 = jlm::tests::create_testop(region, { lv3->argument(), lv4->argument() }, { vt })[0]; + auto u1 = jlm::tests::create_testop(region, { lv2.pre }, { vt })[0]; + auto u2 = jlm::tests::create_testop(region, { 
lv3.pre }, { vt })[0]; + auto b1 = jlm::tests::create_testop(region, { lv3.pre, lv4.pre }, { vt })[0]; - lv2->result()->divert_to(u1); - lv3->result()->divert_to(u2); - lv4->result()->divert_to(b1); + lv2.post->divert_to(u1); + lv3.post->divert_to(u2); + lv4.post->divert_to(b1); - theta->set_predicate(lv1->argument()); + theta->set_predicate(lv1.pre); - GraphExport::Create(*theta->output(1), "lv2"); - GraphExport::Create(*theta->output(2), "lv3"); - GraphExport::Create(*theta->output(3), "lv4"); + GraphExport::Create(*lv2.output, "lv2"); + GraphExport::Create(*lv3.output, "lv3"); + GraphExport::Create(*lv4.output, "lv4"); // jlm::rvsdg::view(graph.GetRootRegion(), stdout); jlm::llvm::cne cne; @@ -201,29 +201,29 @@ test_theta2() auto theta = jlm::rvsdg::ThetaNode::create(&graph.GetRootRegion()); auto region = theta->subregion(); - auto lv1 = theta->add_loopvar(c); - auto lv2 = theta->add_loopvar(x); - auto lv3 = theta->add_loopvar(x); + auto lv1 = theta->AddLoopVar(c); + auto lv2 = theta->AddLoopVar(x); + auto lv3 = theta->AddLoopVar(x); - auto u1 = jlm::tests::create_testop(region, { lv2->argument() }, { vt })[0]; - auto u2 = jlm::tests::create_testop(region, { lv3->argument() }, { vt })[0]; + auto u1 = jlm::tests::create_testop(region, { lv2.pre }, { vt })[0]; + auto u2 = jlm::tests::create_testop(region, { lv3.pre }, { vt })[0]; auto b1 = jlm::tests::create_testop(region, { u2, u2 }, { vt })[0]; - lv2->result()->divert_to(u1); - lv3->result()->divert_to(b1); + lv2.post->divert_to(u1); + lv3.post->divert_to(b1); - theta->set_predicate(lv1->argument()); + theta->set_predicate(lv1.pre); - GraphExport::Create(*theta->output(1), "lv2"); - GraphExport::Create(*theta->output(2), "lv3"); + GraphExport::Create(*lv2.output, "lv2"); + GraphExport::Create(*lv3.output, "lv3"); // jlm::rvsdg::view(graph, stdout); jlm::llvm::cne cne; cne.run(rm, statisticsCollector); // jlm::rvsdg::view(graph, stdout); - assert(lv2->result()->origin() == u1); - assert(lv2->argument()->nusers() 
!= 0 && lv3->argument()->nusers() != 0); + assert(lv2.post->origin() == u1); + assert(lv2.pre->nusers() != 0 && lv3.pre->nusers() != 0); } static inline void @@ -245,32 +245,32 @@ test_theta3() auto theta1 = jlm::rvsdg::ThetaNode::create(&graph.GetRootRegion()); auto r1 = theta1->subregion(); - auto lv1 = theta1->add_loopvar(c); - auto lv2 = theta1->add_loopvar(x); - auto lv3 = theta1->add_loopvar(x); - auto lv4 = theta1->add_loopvar(x); + auto lv1 = theta1->AddLoopVar(c); + auto lv2 = theta1->AddLoopVar(x); + auto lv3 = theta1->AddLoopVar(x); + auto lv4 = theta1->AddLoopVar(x); auto theta2 = jlm::rvsdg::ThetaNode::create(r1); auto r2 = theta2->subregion(); - auto p = theta2->add_loopvar(lv1->argument()); - theta2->add_loopvar(lv2->argument()); - theta2->add_loopvar(lv3->argument()); - theta2->add_loopvar(lv4->argument()); - theta2->set_predicate(p->argument()); + auto p = theta2->AddLoopVar(lv1.pre); + auto p2 = theta2->AddLoopVar(lv2.pre); + auto p3 = theta2->AddLoopVar(lv3.pre); + auto p4 = theta2->AddLoopVar(lv4.pre); + theta2->set_predicate(p.pre); - auto u1 = jlm::tests::test_op::create(r1, { theta2->output(1) }, { vt }); - auto b1 = jlm::tests::test_op::create(r1, { theta2->output(2), theta2->output(2) }, { vt }); - auto u2 = jlm::tests::test_op::create(r1, { theta2->output(3) }, { vt }); + auto u1 = jlm::tests::test_op::create(r1, { p2.output }, { vt }); + auto b1 = jlm::tests::test_op::create(r1, { p3.output, p3.output }, { vt }); + auto u2 = jlm::tests::test_op::create(r1, { p4.output }, { vt }); - lv2->result()->divert_to(u1->output(0)); - lv3->result()->divert_to(b1->output(0)); - lv4->result()->divert_to(u1->output(0)); + lv2.post->divert_to(u1->output(0)); + lv3.post->divert_to(b1->output(0)); + lv4.post->divert_to(u1->output(0)); - theta1->set_predicate(lv1->argument()); + theta1->set_predicate(lv1.pre); - GraphExport::Create(*theta1->output(1), "lv2"); - GraphExport::Create(*theta1->output(2), "lv3"); - GraphExport::Create(*theta1->output(3), 
"lv4"); + GraphExport::Create(*lv2.output, "lv2"); + GraphExport::Create(*lv3.output, "lv3"); + GraphExport::Create(*lv4.output, "lv4"); // jlm::rvsdg::view(graph, stdout); jlm::llvm::cne cne; @@ -305,23 +305,23 @@ test_theta4() auto theta = jlm::rvsdg::ThetaNode::create(&graph.GetRootRegion()); auto region = theta->subregion(); - auto lv1 = theta->add_loopvar(c); - auto lv2 = theta->add_loopvar(x); - auto lv3 = theta->add_loopvar(x); - auto lv4 = theta->add_loopvar(y); - auto lv5 = theta->add_loopvar(y); - auto lv6 = theta->add_loopvar(x); - auto lv7 = theta->add_loopvar(x); + auto lv1 = theta->AddLoopVar(c); + auto lv2 = theta->AddLoopVar(x); + auto lv3 = theta->AddLoopVar(x); + auto lv4 = theta->AddLoopVar(y); + auto lv5 = theta->AddLoopVar(y); + auto lv6 = theta->AddLoopVar(x); + auto lv7 = theta->AddLoopVar(x); - auto u1 = jlm::tests::test_op::create(region, { lv2->argument() }, { vt }); - auto b1 = jlm::tests::test_op::create(region, { lv3->argument(), lv3->argument() }, { vt }); + auto u1 = jlm::tests::test_op::create(region, { lv2.pre }, { vt }); + auto b1 = jlm::tests::test_op::create(region, { lv3.pre, lv3.pre }, { vt }); - lv2->result()->divert_to(lv4->argument()); - lv3->result()->divert_to(lv5->argument()); - lv4->result()->divert_to(u1->output(0)); - lv5->result()->divert_to(b1->output(0)); + lv2.post->divert_to(lv4.pre); + lv3.post->divert_to(lv5.pre); + lv4.post->divert_to(u1->output(0)); + lv5.post->divert_to(b1->output(0)); - theta->set_predicate(lv1->argument()); + theta->set_predicate(lv1.pre); auto & ex1 = GraphExport::Create(*theta->output(1), "lv2"); auto & ex2 = GraphExport::Create(*theta->output(2), "lv3"); @@ -334,8 +334,8 @@ test_theta4() // jlm::rvsdg::view(graph, stdout); assert(ex1.origin() != ex2.origin()); - assert(lv2->argument()->nusers() != 0 && lv3->argument()->nusers() != 0); - assert(lv6->result()->origin() == lv7->result()->origin()); + assert(lv2.pre->nusers() != 0 && lv3.pre->nusers() != 0); + assert(lv6.post->origin() == 
lv7.post->origin()); } static inline void @@ -358,16 +358,16 @@ test_theta5() auto theta = jlm::rvsdg::ThetaNode::create(&graph.GetRootRegion()); auto region = theta->subregion(); - auto lv0 = theta->add_loopvar(c); - auto lv1 = theta->add_loopvar(x); - auto lv2 = theta->add_loopvar(x); - auto lv3 = theta->add_loopvar(y); - auto lv4 = theta->add_loopvar(y); + auto lv0 = theta->AddLoopVar(c); + auto lv1 = theta->AddLoopVar(x); + auto lv2 = theta->AddLoopVar(x); + auto lv3 = theta->AddLoopVar(y); + auto lv4 = theta->AddLoopVar(y); - lv1->result()->divert_to(lv3->argument()); - lv2->result()->divert_to(lv4->argument()); + lv1.post->divert_to(lv3.pre); + lv2.post->divert_to(lv4.pre); - theta->set_predicate(lv0->argument()); + theta->set_predicate(lv0.pre); auto & ex1 = GraphExport::Create(*theta->output(1), "lv1"); auto & ex2 = GraphExport::Create(*theta->output(2), "lv2"); diff --git a/tests/jlm/llvm/opt/test-inversion.cpp b/tests/jlm/llvm/opt/test-inversion.cpp index e37700d68..f87c7ba2a 100644 --- a/tests/jlm/llvm/opt/test-inversion.cpp +++ b/tests/jlm/llvm/opt/test-inversion.cpp @@ -31,20 +31,20 @@ test1() auto theta = jlm::rvsdg::ThetaNode::create(&graph.GetRootRegion()); - auto lvx = theta->add_loopvar(x); - auto lvy = theta->add_loopvar(y); - theta->add_loopvar(z); + auto lvx = theta->AddLoopVar(x); + auto lvy = theta->AddLoopVar(y); + theta->AddLoopVar(z); auto a = jlm::tests::create_testop( theta->subregion(), - { lvx->argument(), lvy->argument() }, + { lvx.pre, lvy.pre }, { jlm::rvsdg::bittype::Create(1) })[0]; auto predicate = jlm::rvsdg::match(1, { { 1, 0 } }, 1, 2, a); auto gamma = jlm::rvsdg::GammaNode::create(predicate, 2); - auto evx = gamma->AddEntryVar(lvx->argument()); - auto evy = gamma->AddEntryVar(lvy->argument()); + auto evx = gamma->AddEntryVar(lvx.pre); + auto evy = gamma->AddEntryVar(lvy.pre); auto b = jlm::tests::create_testop( gamma->subregion(0), @@ -57,7 +57,7 @@ test1() auto xvy = gamma->AddExitVar({ b, c }); - 
lvy->result()->divert_to(xvy.output); + lvy.post->divert_to(xvy.output); theta->set_predicate(predicate); @@ -87,26 +87,26 @@ test2() auto theta = jlm::rvsdg::ThetaNode::create(&graph.GetRootRegion()); - auto lv1 = theta->add_loopvar(x); + auto lv1 = theta->AddLoopVar(x); auto n1 = jlm::tests::create_testop( theta->subregion(), - { lv1->argument() }, + { lv1.pre }, { jlm::rvsdg::bittype::Create(1) })[0]; - auto n2 = jlm::tests::create_testop(theta->subregion(), { lv1->argument() }, { vt })[0]; + auto n2 = jlm::tests::create_testop(theta->subregion(), { lv1.pre }, { vt })[0]; auto predicate = jlm::rvsdg::match(1, { { 1, 0 } }, 1, 2, n1); auto gamma = jlm::rvsdg::GammaNode::create(predicate, 2); auto ev1 = gamma->AddEntryVar(n1); - auto ev2 = gamma->AddEntryVar(lv1->argument()); + auto ev2 = gamma->AddEntryVar(lv1.pre); auto ev3 = gamma->AddEntryVar(n2); gamma->AddExitVar(ev1.branchArgument); gamma->AddExitVar(ev2.branchArgument); gamma->AddExitVar(ev3.branchArgument); - lv1->result()->divert_to(gamma->output(1)); + lv1.post->divert_to(gamma->output(1)); theta->set_predicate(predicate); diff --git a/tests/jlm/llvm/opt/test-push.cpp b/tests/jlm/llvm/opt/test-push.cpp index ab4a5f39d..7cca44259 100644 --- a/tests/jlm/llvm/opt/test-push.cpp +++ b/tests/jlm/llvm/opt/test-push.cpp @@ -76,23 +76,20 @@ test_theta() auto theta = jlm::rvsdg::ThetaNode::create(&graph.GetRootRegion()); - auto lv1 = theta->add_loopvar(c); - auto lv2 = theta->add_loopvar(x); - auto lv3 = theta->add_loopvar(x); - auto lv4 = theta->add_loopvar(s); + auto lv1 = theta->AddLoopVar(c); + auto lv2 = theta->AddLoopVar(x); + auto lv3 = theta->AddLoopVar(x); + auto lv4 = theta->AddLoopVar(s); auto o1 = jlm::tests::create_testop(theta->subregion(), {}, { vt })[0]; - auto o2 = jlm::tests::create_testop(theta->subregion(), { o1, lv3->argument() }, { vt })[0]; - auto o3 = jlm::tests::create_testop(theta->subregion(), { lv2->argument(), o2 }, { vt })[0]; - auto o4 = jlm::tests::create_testop( - 
theta->subregion(), - { lv3->argument(), lv4->argument() }, - { st })[0]; + auto o2 = jlm::tests::create_testop(theta->subregion(), { o1, lv3.pre }, { vt })[0]; + auto o3 = jlm::tests::create_testop(theta->subregion(), { lv2.pre, o2 }, { vt })[0]; + auto o4 = jlm::tests::create_testop(theta->subregion(), { lv3.pre, lv4.pre }, { st })[0]; - lv2->result()->divert_to(o3); - lv4->result()->divert_to(o4); + lv2.post->divert_to(o3); + lv4.post->divert_to(o4); - theta->set_predicate(lv1->argument()); + theta->set_predicate(lv1.pre); GraphExport::Create(*theta->output(0), "c"); @@ -121,18 +118,17 @@ test_push_theta_bottom() auto theta = jlm::rvsdg::ThetaNode::create(&graph.GetRootRegion()); - auto lvc = theta->add_loopvar(c); - auto lva = theta->add_loopvar(a); - auto lvv = theta->add_loopvar(v); - auto lvs = theta->add_loopvar(s); + auto lvc = theta->AddLoopVar(c); + auto lva = theta->AddLoopVar(a); + auto lvv = theta->AddLoopVar(v); + auto lvs = theta->AddLoopVar(s); - auto s1 = - StoreNonVolatileNode::Create(lva->argument(), lvv->argument(), { lvs->argument() }, 4)[0]; + auto s1 = StoreNonVolatileNode::Create(lva.pre, lvv.pre, { lvs.pre }, 4)[0]; - lvs->result()->divert_to(s1); - theta->set_predicate(lvc->argument()); + lvs.post->divert_to(s1); + theta->set_predicate(lvc.pre); - auto & ex = GraphExport::Create(*lvs, "s"); + auto & ex = GraphExport::Create(*lvs.output, "s"); jlm::rvsdg::view(graph, stdout); jlm::llvm::push_bottom(theta); diff --git a/tests/jlm/llvm/opt/test-unroll.cpp b/tests/jlm/llvm/opt/test-unroll.cpp index f901dd3e4..c6677b524 100644 --- a/tests/jlm/llvm/opt/test-unroll.cpp +++ b/tests/jlm/llvm/opt/test-unroll.cpp @@ -47,15 +47,15 @@ create_theta( auto theta = ThetaNode::create(&graph->GetRootRegion()); auto subregion = theta->subregion(); - auto idv = theta->add_loopvar(init); - auto lvs = theta->add_loopvar(step); - auto lve = theta->add_loopvar(end); + auto idv = theta->AddLoopVar(init); + auto lvs = theta->AddLoopVar(step); + auto lve = 
theta->AddLoopVar(end); - auto arm = SimpleNode::create_normalized(subregion, aop, { idv->argument(), lvs->argument() })[0]; - auto cmp = SimpleNode::create_normalized(subregion, cop, { arm, lve->argument() })[0]; + auto arm = SimpleNode::create_normalized(subregion, aop, { idv.pre, lvs.pre })[0]; + auto cmp = SimpleNode::create_normalized(subregion, cop, { arm, lve.pre })[0]; auto match = jlm::rvsdg::match(1, { { 1, 1 } }, 0, 2, cmp); - idv->result()->divert_to(arm); + idv.post->divert_to(arm); theta->set_predicate(match); return theta; @@ -87,7 +87,7 @@ test_unrollinfo() assert(!ui->is_known()); assert(!ui->niterations()); assert(ui->theta() == theta); - assert(ui->idv()->input()->origin() == x); + assert(theta->MapPreLoopVar(*ui->idv()).input->origin() == x); } { @@ -242,19 +242,19 @@ test_unknown_boundaries() auto y = &jlm::tests::GraphImport::Create(graph, bt, "y"); auto theta = jlm::rvsdg::ThetaNode::create(&graph.GetRootRegion()); - auto lv1 = theta->add_loopvar(x); - auto lv2 = theta->add_loopvar(y); + auto lv1 = theta->AddLoopVar(x); + auto lv2 = theta->AddLoopVar(y); auto one = jlm::rvsdg::create_bitconstant(theta->subregion(), 32, 1); - auto add = jlm::rvsdg::bitadd_op::create(32, lv1->argument(), one); - auto cmp = jlm::rvsdg::bitult_op::create(32, add, lv2->argument()); + auto add = jlm::rvsdg::bitadd_op::create(32, lv1.pre, one); + auto cmp = jlm::rvsdg::bitult_op::create(32, add, lv2.pre); auto match = jlm::rvsdg::match(1, { { 1, 0 } }, 1, 2, cmp); - lv1->result()->divert_to(add); + lv1.post->divert_to(add); theta->set_predicate(match); - auto & ex1 = GraphExport::Create(*lv1, "x"); + auto & ex1 = GraphExport::Create(*lv1.output, "x"); // jlm::rvsdg::view(graph, stdout); jlm::llvm::loopunroll loopunroll(2); @@ -301,59 +301,59 @@ test_nested_theta() /* Outer loop */ auto otheta = jlm::rvsdg::ThetaNode::create(&graph.GetRootRegion()); - auto lvo_init = otheta->add_loopvar(init); - auto lvo_step = otheta->add_loopvar(step); - auto lvo_end = 
otheta->add_loopvar(end); + auto lvo_init = otheta->AddLoopVar(init); + auto lvo_step = otheta->AddLoopVar(step); + auto lvo_end = otheta->AddLoopVar(end); - auto add = jlm::rvsdg::bitadd_op::create(32, lvo_init->argument(), lvo_step->argument()); - auto compare = jlm::rvsdg::bitult_op::create(32, add, lvo_end->argument()); + auto add = jlm::rvsdg::bitadd_op::create(32, lvo_init.pre, lvo_step.pre); + auto compare = jlm::rvsdg::bitult_op::create(32, add, lvo_end.pre); auto match = jlm::rvsdg::match(1, { { 1, 1 } }, 0, 2, compare); otheta->set_predicate(match); - lvo_init->result()->divert_to(add); + lvo_init.post->divert_to(add); /* First inner loop in the original loop */ auto inner_theta = jlm::rvsdg::ThetaNode::create(otheta->subregion()); auto inner_init = jlm::rvsdg::create_bitconstant(otheta->subregion(), 32, 0); - auto lvi_init = inner_theta->add_loopvar(inner_init); - auto lvi_step = inner_theta->add_loopvar(lvo_step->argument()); - auto lvi_end = inner_theta->add_loopvar(lvo_end->argument()); + auto lvi_init = inner_theta->AddLoopVar(inner_init); + auto lvi_step = inner_theta->AddLoopVar(lvo_step.pre); + auto lvi_end = inner_theta->AddLoopVar(lvo_end.pre); - auto inner_add = jlm::rvsdg::bitadd_op::create(32, lvi_init->argument(), lvi_step->argument()); - auto inner_compare = jlm::rvsdg::bitult_op::create(32, inner_add, lvi_end->argument()); + auto inner_add = jlm::rvsdg::bitadd_op::create(32, lvi_init.pre, lvi_step.pre); + auto inner_compare = jlm::rvsdg::bitult_op::create(32, inner_add, lvi_end.pre); auto inner_match = jlm::rvsdg::match(1, { { 1, 1 } }, 0, 2, inner_compare); inner_theta->set_predicate(inner_match); - lvi_init->result()->divert_to(inner_add); + lvi_init.post->divert_to(inner_add); /* Nested inner loop */ auto inner_nested_theta = jlm::rvsdg::ThetaNode::create(inner_theta->subregion()); auto inner_nested_init = jlm::rvsdg::create_bitconstant(inner_theta->subregion(), 32, 0); - auto lvi_nested_init = 
inner_nested_theta->add_loopvar(inner_nested_init); - auto lvi_nested_step = inner_nested_theta->add_loopvar(lvi_step->argument()); - auto lvi_nested_end = inner_nested_theta->add_loopvar(lvi_end->argument()); + auto lvi_nested_init = inner_nested_theta->AddLoopVar(inner_nested_init); + auto lvi_nested_step = inner_nested_theta->AddLoopVar(lvi_step.pre); + auto lvi_nested_end = inner_nested_theta->AddLoopVar(lvi_end.pre); auto inner_nested_add = - jlm::rvsdg::bitadd_op::create(32, lvi_nested_init->argument(), lvi_nested_step->argument()); + jlm::rvsdg::bitadd_op::create(32, lvi_nested_init.pre, lvi_nested_step.pre); auto inner_nested_compare = - jlm::rvsdg::bitult_op::create(32, inner_nested_add, lvi_nested_end->argument()); + jlm::rvsdg::bitult_op::create(32, inner_nested_add, lvi_nested_end.pre); auto inner_nested_match = jlm::rvsdg::match(1, { { 1, 1 } }, 0, 2, inner_nested_compare); inner_nested_theta->set_predicate(inner_nested_match); - lvi_nested_init->result()->divert_to(inner_nested_add); + lvi_nested_init.post->divert_to(inner_nested_add); /* Second inner loop in the original loop */ auto inner2_theta = jlm::rvsdg::ThetaNode::create(otheta->subregion()); auto inner2_init = jlm::rvsdg::create_bitconstant(otheta->subregion(), 32, 0); - auto lvi2_init = inner2_theta->add_loopvar(inner2_init); - auto lvi2_step = inner2_theta->add_loopvar(lvo_step->argument()); - auto lvi2_end = inner2_theta->add_loopvar(lvo_end->argument()); + auto lvi2_init = inner2_theta->AddLoopVar(inner2_init); + auto lvi2_step = inner2_theta->AddLoopVar(lvo_step.pre); + auto lvi2_end = inner2_theta->AddLoopVar(lvo_end.pre); - auto inner2_add = jlm::rvsdg::bitadd_op::create(32, lvi2_init->argument(), lvi2_step->argument()); - auto inner2_compare = jlm::rvsdg::bitult_op::create(32, inner2_add, lvi2_end->argument()); + auto inner2_add = jlm::rvsdg::bitadd_op::create(32, lvi2_init.pre, lvi2_step.pre); + auto inner2_compare = jlm::rvsdg::bitult_op::create(32, inner2_add, lvi2_end.pre); auto 
inner2_match = jlm::rvsdg::match(1, { { 1, 1 } }, 0, 2, inner2_compare); inner2_theta->set_predicate(inner2_match); - lvi2_init->result()->divert_to(inner2_add); + lvi2_init.post->divert_to(inner2_add); // jlm::rvsdg::view(graph, stdout); jlm::llvm::loopunroll loopunroll(4); diff --git a/tests/jlm/mlir/backend/TestJlmToMlirConverter.cpp b/tests/jlm/mlir/backend/TestJlmToMlirConverter.cpp index 4dc2d775d..d92e4599a 100644 --- a/tests/jlm/mlir/backend/TestJlmToMlirConverter.cpp +++ b/tests/jlm/mlir/backend/TestJlmToMlirConverter.cpp @@ -634,8 +634,8 @@ TestTheta() auto predicate = jlm::rvsdg::control_constant(rvsdgThetaNode->subregion(), 2, 0); - rvsdgThetaNode->add_loopvar(entryvar1); - rvsdgThetaNode->add_loopvar(entryvar2); + rvsdgThetaNode->AddLoopVar(entryvar1); + rvsdgThetaNode->AddLoopVar(entryvar2); rvsdgThetaNode->set_predicate(predicate); // Convert the RVSDG to MLIR diff --git a/tests/jlm/mlir/frontend/TestMlirToJlmConverter.cpp b/tests/jlm/mlir/frontend/TestMlirToJlmConverter.cpp index 348bf174b..1d9348e6d 100644 --- a/tests/jlm/mlir/frontend/TestMlirToJlmConverter.cpp +++ b/tests/jlm/mlir/frontend/TestMlirToJlmConverter.cpp @@ -998,7 +998,7 @@ TestThetaOp() std::cout << "Checking theta node" << std::endl; assert(thetaNode->ninputs() == 2); - assert(thetaNode->nloopvars() == 2); + assert(thetaNode->GetLoopVars().size() == 2); assert(thetaNode->noutputs() == 2); assert(thetaNode->nsubregions() == 1); assert(is(thetaNode->predicate()->type())); diff --git a/tests/jlm/rvsdg/test-theta.cpp b/tests/jlm/rvsdg/test-theta.cpp index e2b190122..6daefe710 100644 --- a/tests/jlm/rvsdg/test-theta.cpp +++ b/tests/jlm/rvsdg/test-theta.cpp @@ -24,13 +24,13 @@ TestThetaCreation() auto theta = jlm::rvsdg::ThetaNode::create(&graph.GetRootRegion()); - auto lv1 = theta->add_loopvar(imp1); - auto lv2 = theta->add_loopvar(imp2); - auto lv3 = theta->add_loopvar(imp3); + auto lv1 = theta->AddLoopVar(imp1); + auto lv2 = theta->AddLoopVar(imp2); + auto lv3 = 
theta->AddLoopVar(imp3); - lv2->result()->divert_to(lv3->argument()); - lv3->result()->divert_to(lv3->argument()); - theta->set_predicate(lv1->argument()); + lv2.post->divert_to(lv3.pre); + lv3.post->divert_to(lv3.pre); + theta->set_predicate(lv1.pre); jlm::tests::GraphExport::Create(*theta->output(0), "exp"); auto theta2 = static_cast(theta)->copy( @@ -38,13 +38,13 @@ TestThetaCreation() { imp1, imp2, imp3 }); jlm::rvsdg::view(&graph.GetRootRegion(), stdout); - assert(lv1->node() == theta); - assert(lv2->node() == theta); - assert(lv3->node() == theta); + assert(jlm::rvsdg::TryGetOwnerNode(*lv1.output) == theta); + assert(jlm::rvsdg::TryGetOwnerNode(*lv2.output) == theta); + assert(jlm::rvsdg::TryGetOwnerNode(*lv3.output) == theta); assert(theta->predicate() == theta->subregion()->result(0)); - assert(theta->nloopvars() == 3); - assert((*theta->begin())->result() == theta->subregion()->result(1)); + assert(theta->GetLoopVars().size() == 3); + assert(theta->GetLoopVars()[0].post == theta->subregion()->result(1)); assert(dynamic_cast(theta2)); } @@ -64,30 +64,30 @@ TestRemoveThetaOutputsWhere() auto thetaNode = ThetaNode::create(&rvsdg.GetRootRegion()); - auto thetaOutput0 = thetaNode->add_loopvar(ctl); - auto thetaOutput1 = thetaNode->add_loopvar(x); - auto thetaOutput2 = thetaNode->add_loopvar(y); - thetaNode->set_predicate(thetaOutput0->argument()); + auto thetaOutput0 = thetaNode->AddLoopVar(ctl); + auto thetaOutput1 = thetaNode->AddLoopVar(x); + auto thetaOutput2 = thetaNode->AddLoopVar(y); + thetaNode->set_predicate(thetaOutput0.pre); - jlm::tests::GraphExport::Create(*thetaOutput0, ""); + jlm::tests::GraphExport::Create(*thetaOutput0.output, ""); // Act & Assert auto deadInputs = thetaNode->RemoveThetaOutputsWhere( - [&](const ThetaOutput & output) + [&](const jlm::rvsdg::output & output) { - return output.index() == thetaOutput1->index(); + return output.index() == thetaOutput1.output->index(); }); assert(deadInputs.Size() == 1); 
assert(deadInputs.Contains(thetaNode->input(1))); assert(thetaNode->noutputs() == 2); assert(thetaNode->subregion()->nresults() == 3); - assert(thetaOutput0->index() == 0); - assert(thetaOutput0->result()->index() == 1); - assert(thetaOutput2->index() == 1); - assert(thetaOutput2->result()->index() == 2); + assert(thetaOutput0.output->index() == 0); + assert(thetaOutput0.post->index() == 1); + assert(thetaOutput2.output->index() == 1); + assert(thetaOutput2.post->index() == 2); deadInputs = thetaNode->RemoveThetaOutputsWhere( - [](const ThetaOutput &) + [](const jlm::rvsdg::output &) { return true; }); @@ -95,8 +95,8 @@ TestRemoveThetaOutputsWhere() assert(deadInputs.Contains(thetaNode->input(2))); assert(thetaNode->noutputs() == 1); assert(thetaNode->subregion()->nresults() == 2); - assert(thetaOutput0->index() == 0); - assert(thetaOutput0->result()->index() == 1); + assert(thetaOutput0.output->index() == 0); + assert(thetaOutput0.post->index() == 1); } static void @@ -114,12 +114,12 @@ TestPruneThetaOutputs() auto thetaNode = ThetaNode::create(&rvsdg.GetRootRegion()); - auto thetaOutput0 = thetaNode->add_loopvar(ctl); - thetaNode->add_loopvar(x); - thetaNode->add_loopvar(y); - thetaNode->set_predicate(thetaOutput0->argument()); + auto thetaOutput0 = thetaNode->AddLoopVar(ctl); + thetaNode->AddLoopVar(x); + thetaNode->AddLoopVar(y); + thetaNode->set_predicate(thetaOutput0.pre); - jlm::tests::GraphExport::Create(*thetaOutput0, ""); + jlm::tests::GraphExport::Create(*thetaOutput0.output, ""); // Act auto deadInputs = thetaNode->PruneThetaOutputs(); @@ -130,8 +130,8 @@ TestPruneThetaOutputs() assert(deadInputs.Contains(thetaNode->input(2))); assert(thetaNode->noutputs() == 1); assert(thetaNode->subregion()->nresults() == 2); - assert(thetaOutput0->index() == 0); - assert(thetaOutput0->result()->index() == 1); + assert(thetaOutput0.output->index() == 0); + assert(thetaOutput0.post->index() == 1); } static void @@ -149,45 +149,46 @@ TestRemoveThetaInputsWhere() auto 
thetaNode = ThetaNode::create(&rvsdg.GetRootRegion()); - auto thetaOutput0 = thetaNode->add_loopvar(ctl); - auto thetaOutput1 = thetaNode->add_loopvar(x); - auto thetaOutput2 = thetaNode->add_loopvar(y); - thetaNode->set_predicate(thetaOutput0->argument()); + auto thetaOutput0 = thetaNode->AddLoopVar(ctl); + auto thetaOutput1 = thetaNode->AddLoopVar(x); + auto thetaOutput2 = thetaNode->AddLoopVar(y); + thetaNode->set_predicate(thetaOutput0.pre); auto result = jlm::tests::SimpleNode::Create(*thetaNode->subregion(), {}, { valueType }).output(0); - thetaOutput1->result()->divert_to(result); - thetaOutput2->result()->divert_to(result); + thetaOutput1.post->divert_to(result); + thetaOutput2.post->divert_to(result); - jlm::tests::GraphExport::Create(*thetaOutput0, ""); + jlm::tests::GraphExport::Create(*thetaOutput0.output, ""); // Act & Assert auto deadOutputs = thetaNode->RemoveThetaInputsWhere( - [&](const ThetaInput & input) + [&](const jlm::rvsdg::input & input) { - return input.index() == thetaOutput1->input()->index(); + return input.index() == thetaOutput1.input->index(); }); assert(deadOutputs.Size() == 1); assert(deadOutputs.Contains(thetaNode->output(1))); assert(thetaNode->ninputs() == 2); assert(thetaNode->subregion()->narguments() == 2); - assert(thetaOutput0->input()->index() == 0); - assert(thetaOutput0->argument()->index() == 0); - assert(thetaOutput2->input()->index() == 1); - assert(thetaOutput2->argument()->index() == 1); + assert(thetaOutput0.input->index() == 0); + assert(thetaOutput0.pre->index() == 0); + assert(thetaOutput2.input->index() == 1); + assert(thetaOutput2.pre->index() == 1); + auto expectDeadOutput = thetaNode->output(2); deadOutputs = thetaNode->RemoveThetaInputsWhere( - [](const ThetaInput &) + [](const jlm::rvsdg::input & input) { return true; }); assert(deadOutputs.Size() == 1); - assert(deadOutputs.Contains(thetaNode->output(2))); + assert(deadOutputs.Contains(expectDeadOutput)); assert(thetaNode->ninputs() == 1); 
assert(thetaNode->subregion()->narguments() == 1); - assert(thetaOutput0->input()->index() == 0); - assert(thetaOutput0->argument()->index() == 0); + assert(thetaOutput0.input->index() == 0); + assert(thetaOutput0.pre->index() == 0); } static void @@ -205,18 +206,18 @@ TestPruneThetaInputs() auto thetaNode = ThetaNode::create(&rvsdg.GetRootRegion()); - auto thetaOutput0 = thetaNode->add_loopvar(ctl); - auto thetaOutput1 = thetaNode->add_loopvar(x); - auto thetaOutput2 = thetaNode->add_loopvar(y); - thetaNode->set_predicate(thetaOutput0->argument()); + auto thetaOutput0 = thetaNode->AddLoopVar(ctl); + auto thetaOutput1 = thetaNode->AddLoopVar(x); + auto thetaOutput2 = thetaNode->AddLoopVar(y); + thetaNode->set_predicate(thetaOutput0.pre); auto result = jlm::tests::SimpleNode::Create(*thetaNode->subregion(), {}, { valueType }).output(0); - thetaOutput1->result()->divert_to(result); - thetaOutput2->result()->divert_to(result); + thetaOutput1.post->divert_to(result); + thetaOutput2.post->divert_to(result); - jlm::tests::GraphExport::Create(*thetaOutput0, ""); + jlm::tests::GraphExport::Create(*thetaOutput0.output, ""); // Act auto deadOutputs = thetaNode->PruneThetaInputs(); @@ -227,8 +228,8 @@ TestPruneThetaInputs() assert(deadOutputs.Contains(thetaNode->output(2))); assert(thetaNode->ninputs() == 1); assert(thetaNode->subregion()->narguments() == 1); - assert(thetaOutput0->input()->index() == 0); - assert(thetaOutput0->argument()->index() == 0); + assert(thetaOutput0.input->index() == 0); + assert(thetaOutput0.pre->index() == 0); } static int