diff --git a/src/coreclr/jit/gentree.h b/src/coreclr/jit/gentree.h index f824dfd24c1bb5..2555d33a244633 100644 --- a/src/coreclr/jit/gentree.h +++ b/src/coreclr/jit/gentree.h @@ -548,6 +548,8 @@ enum GenTreeFlags : unsigned int GTF_MDARRLOWERBOUND_NONFAULTING = 0x20000000, // GT_MDARR_LOWER_BOUND -- An MD array lower bound operation that cannot fault. Same as GT_IND_NONFAULTING. + GTF_ALLOCOBJ_EMPTY_STATIC = 0x80000000, // GT_ALLOCOBJ -- allocation site is part of an empty static pattern + #ifdef FEATURE_HW_INTRINSICS GTF_HW_EM_OP = 0x10000000, // GT_HWINTRINSIC -- node is used as an operand to an embedded mask GTF_HW_USER_CALL = 0x20000000, // GT_HWINTRINSIC -- node is implemented via a user call diff --git a/src/coreclr/jit/importer.cpp b/src/coreclr/jit/importer.cpp index b20d0138218438..d66e8ca79c489c 100644 --- a/src/coreclr/jit/importer.cpp +++ b/src/coreclr/jit/importer.cpp @@ -8968,6 +8968,15 @@ void Compiler::impImportBlockCode(BasicBlock* block) return; } + // Flag if this allocation happens within a method that uses the static empty + // pattern (if we stack allocate this object, we can optimize the empty side away) + // + if (lookupNamedIntrinsic(info.compMethodHnd) == NI_System_SZArrayHelper_GetEnumerator) + { + JITDUMP("Allocation is part of empty static pattern\n"); + op1->gtFlags |= GTF_ALLOCOBJ_EMPTY_STATIC; + } + // Remember that this basic block contains 'new' of an object block->SetFlags(BBF_HAS_NEWOBJ); optMethodFlags |= OMF_HAS_NEWOBJ; diff --git a/src/coreclr/jit/objectalloc.cpp b/src/coreclr/jit/objectalloc.cpp index 89e28c5978c6d4..66837d6d280727 100644 --- a/src/coreclr/jit/objectalloc.cpp +++ b/src/coreclr/jit/objectalloc.cpp @@ -623,6 +623,58 @@ unsigned int ObjectAllocator::MorphAllocObjNodeIntoStackAlloc( comp->fgInsertStmtBefore(block, stmt, initStmt); + // If this allocation is part of the special empty static pattern, find the controlling + // branch and force control to always flow to the new instance side.
+ // + if ((allocObj->gtFlags & GTF_ALLOCOBJ_EMPTY_STATIC) != 0) + { + BasicBlock* const predBlock = block->GetUniquePred(comp); + assert(predBlock != nullptr); + assert(predBlock->KindIs(BBJ_COND)); + + JITDUMP("Empty static pattern controlled by " FMT_BB ", optimizing to always use stack allocated instance\n", + predBlock->bbNum); + Statement* const controllingStmt = predBlock->lastStmt(); + GenTree* const controllingNode = controllingStmt->GetRootNode(); + assert(controllingNode->OperIs(GT_JTRUE)); + + FlowEdge* const trueEdge = predBlock->GetTrueEdge(); + FlowEdge* const falseEdge = predBlock->GetFalseEdge(); + FlowEdge* keptEdge = nullptr; + FlowEdge* removedEdge = nullptr; + + if (trueEdge->getDestinationBlock() == block) + { + keptEdge = trueEdge; + removedEdge = falseEdge; + } + else + { + assert(falseEdge->getDestinationBlock() == block); + keptEdge = falseEdge; + removedEdge = trueEdge; + } + + BasicBlock* removedBlock = removedEdge->getDestinationBlock(); + comp->fgRemoveRefPred(removedEdge); + predBlock->SetKindAndTargetEdge(BBJ_ALWAYS, keptEdge); + + if (predBlock->hasProfileWeight()) + { + block->setBBProfileWeight(predBlock->bbWeight); + } + + // Just lop off the JTRUE, the rest can clean up later + // (e.g. it may have side effects) + // + controllingStmt->SetRootNode(controllingNode->AsOp()->gtOp1); + + // We must remove the empty static block now too. + assert(removedBlock->bbRefs == 0); + assert(removedBlock->KindIs(BBJ_ALWAYS)); + comp->fgRemoveBlock(removedBlock, /* unreachable */ true); + } + return lclNum; }