From 0407f6c22c8cc4db9d9345a99180103c9222ab5b Mon Sep 17 00:00:00 2001
From: "Aman Khalid (from Dev Box)"
Date: Mon, 29 Jul 2024 10:06:52 -0400
Subject: [PATCH] Generalize fgOptimizeBranch

---
 src/coreclr/jit/fgopt.cpp | 50 ++++++++++++++++----------------------------------
 1 file changed, 16 insertions(+), 34 deletions(-)

diff --git a/src/coreclr/jit/fgopt.cpp b/src/coreclr/jit/fgopt.cpp
index 3efa813eda66c4..3b7066b65c1643 100644
--- a/src/coreclr/jit/fgopt.cpp
+++ b/src/coreclr/jit/fgopt.cpp
@@ -2508,7 +2508,7 @@ void Compiler::fgRemoveConditionalJump(BasicBlock* block)
 //
 // Currently we require that the conditional branch jump back to the block that follows the unconditional
 // branch. We can improve the code execution and layout by concatenating a copy of the conditional branch
-// block at the end of the conditional branch and reversing the sense of the branch.
+// block at the end of the conditional branch.
 //
 // This is only done when the amount of code to be copied is smaller than our calculated threshold
 // in maxDupCostSz.
@@ -2520,12 +2520,14 @@ void Compiler::fgRemoveConditionalJump(BasicBlock* block)
 //
 bool Compiler::fgOptimizeBranch(BasicBlock* bJump)
 {
-    if (opts.MinOpts())
+    if (!bJump->KindIs(BBJ_ALWAYS))
     {
         return false;
     }
 
-    if (!bJump->KindIs(BBJ_ALWAYS))
+    BasicBlock* bDest = bJump->GetTarget();
+
+    if (!bDest->KindIs(BBJ_COND))
     {
         return false;
     }
@@ -2536,25 +2538,21 @@ bool Compiler::fgOptimizeBranch(BasicBlock* bJump)
         return false;
     }
 
-    if (bJump->HasFlag(BBF_KEEP_BBJ_ALWAYS))
+    if (!bJump->NextIs(bDest->GetTrueTarget()) && !bJump->NextIs(bDest->GetFalseTarget()))
     {
         return false;
     }
 
-    // Don't hoist a conditional branch into the scratch block; we'd prefer it stay BBJ_ALWAYS.
-    if (fgBBisScratch(bJump))
-    {
-        return false;
-    }
-
-    BasicBlock* bDest = bJump->GetTarget();
+    // bJump is followed by one of bDest's successors, so bJump cannot be the last block
+    assert(!bJump->IsLast());
 
-    if (!bDest->KindIs(BBJ_COND))
+    if (bJump->HasFlag(BBF_KEEP_BBJ_ALWAYS))
     {
         return false;
     }
 
-    if (!bJump->NextIs(bDest->GetTrueTarget()))
+    // Don't hoist a conditional branch into the scratch block; we'd prefer it stay BBJ_ALWAYS.
+    if (fgBBisScratch(bJump))
     {
         return false;
     }
@@ -2566,13 +2564,6 @@ bool Compiler::fgOptimizeBranch(BasicBlock* bJump)
         return false;
     }
 
-    // do not jump into another try region
-    BasicBlock* bDestNormalTarget = bDest->GetFalseTarget();
-    if (bDestNormalTarget->hasTryIndex() && !BasicBlock::sameTryRegion(bJump, bDestNormalTarget))
-    {
-        return false;
-    }
-
     // This function is only called by fgReorderBlocks, which we do not run in the backend.
     // If we wanted to run block reordering in the backend, we would need to be able to
     // calculate cost information for LIR on a per-node basis in order for this function
@@ -2605,7 +2596,7 @@ bool Compiler::fgOptimizeBranch(BasicBlock* bJump)
     bool rareDest = bDest->isRunRarely();
     bool rareNext = bJump->Next()->isRunRarely();
 
-    // If we have profile data then we calculate the number of time
+    // If we have profile data then we calculate the number of times
     // the loop will iterate into loopIterations
     if (fgIsUsingProfileWeights())
     {
@@ -2639,7 +2630,7 @@ bool Compiler::fgOptimizeBranch(BasicBlock* bJump)
     //
     // Branches between the hot and rarely run regions
-    // should be minimized.  So we allow a larger size
+    // should be minimized. So we allow a larger size
     //
     if (rareDest != rareJump)
     {
         maxDupCostSz += 6;
     }
@@ -2652,7 +2643,7 @@ bool Compiler::fgOptimizeBranch(BasicBlock* bJump)
     }
 
     //
-    // We we are ngen-ing:
+    // We are ngen-ing:
     //     If the uncondional branch is a rarely run block then
     //     we are willing to have more code expansion since we
     //     won't be running code from this page
@@ -2752,11 +2743,6 @@ bool Compiler::fgOptimizeBranch(BasicBlock* bJump)
         newStmtList->SetPrevStmt(newLastStmt);
     }
 
-    //
-    // Reverse the sense of the compare
-    //
-    gtReverseCond(condTree);
-
     // We need to update the following flags of the bJump block if they were set in the bDest block
     bJump->CopyFlags(bDest, BBF_COPY_PROPAGATE);
 
@@ -2775,13 +2761,9 @@ bool Compiler::fgOptimizeBranch(BasicBlock* bJump)
     FlowEdge* const destFalseEdge = bDest->GetFalseEdge();
     FlowEdge* const destTrueEdge  = bDest->GetTrueEdge();
 
-    // bJump now falls through into the next block
-    //
-    FlowEdge* const falseEdge = fgAddRefPred(bJump->Next(), bJump, destFalseEdge);
+    FlowEdge* const falseEdge = fgAddRefPred(destFalseEdge->getDestinationBlock(), bJump, destFalseEdge);
 
-    // bJump now jumps to bDest's normal jump target
-    //
-    fgRedirectTargetEdge(bJump, bDestNormalTarget);
+    fgRedirectTargetEdge(bJump, destTrueEdge->getDestinationBlock());
 
     bJump->GetTargetEdge()->setLikelihood(destTrueEdge->getLikelihood());
     bJump->SetCond(bJump->GetTargetEdge(), falseEdge);
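
Illustrative sketch (not part of the patch): the doc comment updated in the first hunk describes the transformation this change generalizes. When a BBJ_ALWAYS block bJump targets a BBJ_COND block bDest and either of bDest's successors is the block that follows bJump, bDest's compare is duplicated into bJump and bJump becomes BBJ_COND with bDest's original (non-reversed) true/false targets. The standalone C++ toy below models only that reshaped check and edge retargeting; the types and names here (ToyBlock, tryOptimizeBranch) are hypothetical stand-ins, not the JIT's BasicBlock/Compiler API.

    #include <cassert>
    #include <string>

    enum class Kind { Always, Cond };

    struct ToyBlock
    {
        Kind        kind;
        ToyBlock*   next;        // lexically next block in the layout
        ToyBlock*   target;      // Always: jump target; Cond: true target
        ToyBlock*   falseTarget; // Cond: false target
        std::string condition;   // Cond: the compare, as text
    };

    // Mirrors the reshaped early-out checks and the retargeting of bJump;
    // returns true if the branch was duplicated into bJump.
    bool tryOptimizeBranch(ToyBlock* bJump)
    {
        if (bJump->kind != Kind::Always)
        {
            return false;
        }

        ToyBlock* bDest = bJump->target;
        if (bDest->kind != Kind::Cond)
        {
            return false;
        }

        // Generalized: either of bDest's successors may follow bJump.
        if (bJump->next != bDest->target && bJump->next != bDest->falseTarget)
        {
            return false;
        }

        // Copy bDest's compare into bJump and make bJump conditional, keeping
        // bDest's original true/false targets (no condition reversal).
        bJump->condition   = bDest->condition;
        bJump->falseTarget = bDest->falseTarget;
        bJump->target      = bDest->target;
        bJump->kind        = Kind::Cond;
        return true;
    }

    int main()
    {
        ToyBlock bTrue{Kind::Always, nullptr, nullptr, nullptr, ""};
        ToyBlock bFalse{Kind::Always, nullptr, nullptr, nullptr, ""};
        ToyBlock bDest{Kind::Cond, nullptr, &bTrue, &bFalse, "x < 10"};
        // bJump falls into bDest's false successor, which the old code rejected.
        ToyBlock bJump{Kind::Always, &bFalse, &bDest, nullptr, ""};

        assert(tryOptimizeBranch(&bJump));
        assert(bJump.kind == Kind::Cond);
        assert(bJump.target == &bTrue && bJump.falseTarget == &bFalse);
        return 0;
    }

The example exercises the case the patch newly accepts (bJump followed by bDest's false target rather than its true target), which is also why the gtReverseCond call is no longer needed in the real code.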