From e19ef1aab524ef10a2d118adcd9f6fd6ca2d7ca9 Mon Sep 17 00:00:00 2001 From: Florian Hahn Date: Tue, 25 Aug 2020 08:43:32 +0100 Subject: [PATCH] [DSE,MemorySSA] Cache accesses with/without reachable read-clobbers. Currently we repeatedly check the same uses for read clobbers in some cases. We can avoid unnecessary checks by keeping track of the memory accesses we already found read clobbers for. To do so, we just add memory accesses causing read-clobbers to a set. Note that marking all visited accesses as read-clobbers would be too pessimistic, as that might include accesses not on any path to the actual read clobber. If we do not find any read-clobbers, we can add all visited instructions to another set and use that to skip the same accesses in the next call. Reviewed By: asbirlea Differential Revision: https://reviews.llvm.org/D75025 --- .../Scalar/DeadStoreElimination.cpp | 57 ++++++++++++++++--- 1 file changed, 49 insertions(+), 8 deletions(-) diff --git a/llvm/lib/Transforms/Scalar/DeadStoreElimination.cpp b/llvm/lib/Transforms/Scalar/DeadStoreElimination.cpp index 9d0e026c2809..4e0c7ac25969 100644 --- a/llvm/lib/Transforms/Scalar/DeadStoreElimination.cpp +++ b/llvm/lib/Transforms/Scalar/DeadStoreElimination.cpp @@ -87,6 +87,8 @@ STATISTIC(NumModifiedStores, "Number of stores modified"); STATISTIC(NumCFGChecks, "Number of stores modified"); STATISTIC(NumCFGTries, "Number of stores modified"); STATISTIC(NumCFGSuccess, "Number of stores modified"); +STATISTIC(NumDomMemDefChecks, + "Number iterations check for reads in getDomMemoryDef"); DEBUG_COUNTER(MemorySSACounter, "dse-memoryssa", "Controls which MemoryDefs are eliminated."); @@ -1542,6 +1544,18 @@ struct DSEState { /// basic block. 
DenseMap IOLs; + struct CheckCache { + SmallPtrSet KnownNoReads; + SmallPtrSet KnownReads; + + bool isKnownNoRead(MemoryAccess *A) const { + return KnownNoReads.find(A) != KnownNoReads.end(); + } + bool isKnownRead(MemoryAccess *A) const { + return KnownReads.find(A) != KnownReads.end(); + } + }; + DSEState(Function &F, AliasAnalysis &AA, MemorySSA &MSSA, DominatorTree &DT, PostDominatorTree &PDT, const TargetLibraryInfo &TLI) : F(F), AA(AA), BatchAA(AA), MSSA(MSSA), DT(DT), PDT(PDT), TLI(TLI), @@ -1775,17 +1789,17 @@ struct DSEState { // MemoryDef, return None. The returned value may not (completely) overwrite // \p DefLoc. Currently we bail out when we encounter an aliasing MemoryUse // (read). - Optional getDomMemoryDef(MemoryDef *KillingDef, - MemoryAccess *Current, - MemoryLocation DefLoc, - const Value *DefUO, - unsigned &ScanLimit) { + Optional + getDomMemoryDef(MemoryDef *KillingDef, MemoryAccess *Current, + MemoryLocation DefLoc, const Value *DefUO, CheckCache &Cache, + unsigned &ScanLimit) { if (ScanLimit == 0) { LLVM_DEBUG(dbgs() << "\n ... hit scan limit\n"); return None; } MemoryAccess *DomAccess; + MemoryAccess *StartAccess = Current; bool StepAgain; LLVM_DEBUG(dbgs() << " trying to get dominating access for " << *Current << "\n"); @@ -1825,10 +1839,13 @@ struct DSEState { // they cover all paths from DomAccess to any function exit. SmallPtrSet KillingDefs; KillingDefs.insert(KillingDef->getMemoryInst()); + Instruction *DomMemInst = isa(DomAccess) + ? cast(DomAccess)->getMemoryInst() + : nullptr; LLVM_DEBUG({ dbgs() << " Checking for reads of " << *DomAccess; - if (isa(DomAccess)) - dbgs() << " (" << *cast(DomAccess)->getMemoryInst() << ")\n"; + if (DomMemInst) + dbgs() << " (" << *DomMemInst << ")\n"; else dbgs() << ")\n"; }); @@ -1840,6 +1857,11 @@ struct DSEState { }; PushMemUses(DomAccess); + // Optimistically collect all accesses for reads. If we do not find any + // read clobbers, add them to the cache. 
+ SmallPtrSet KnownNoReads; + if (!DomMemInst || !DomMemInst->mayReadFromMemory()) + KnownNoReads.insert(DomAccess); // Check if DomDef may be read. for (unsigned I = 0; I < WorkList.size(); I++) { MemoryAccess *UseAccess = WorkList[I]; @@ -1851,6 +1873,20 @@ struct DSEState { return None; } --ScanLimit; + NumDomMemDefChecks++; + + // Check if we already visited this access. + if (Cache.isKnownNoRead(UseAccess)) { + LLVM_DEBUG(dbgs() << " ... skip, discovered that " << *UseAccess + << " is safe earlier.\n"); + continue; + } + if (Cache.isKnownRead(UseAccess)) { + LLVM_DEBUG(dbgs() << " ... bail out, discovered that " << *UseAccess + << " has a read-clobber earlier.\n"); + return None; + } + KnownNoReads.insert(UseAccess); if (isa(UseAccess)) { if (any_of(KillingDefs, [this, UseAccess](Instruction *KI) { @@ -1890,6 +1926,9 @@ struct DSEState { // original MD. Stop walk. if (isReadClobber(DefLoc, UseInst)) { LLVM_DEBUG(dbgs() << " ... found read clobber\n"); + Cache.KnownReads.insert(UseAccess); + Cache.KnownReads.insert(StartAccess); + Cache.KnownReads.insert(DomAccess); return None; } @@ -2007,6 +2046,7 @@ struct DSEState { } // No aliasing MemoryUses of DomAccess found, DomAccess is potentially dead. + Cache.KnownNoReads.insert(KnownNoReads.begin(), KnownNoReads.end()); return {DomAccess}; } @@ -2212,6 +2252,7 @@ bool eliminateDeadStoresMemorySSA(Function &F, AliasAnalysis &AA, SetVector ToCheck; ToCheck.insert(KillingDef->getDefiningAccess()); + DSEState::CheckCache Cache; // Check if MemoryAccesses in the worklist are killed by KillingDef. for (unsigned I = 0; I < ToCheck.size(); I++) { Current = ToCheck[I]; @@ -2219,7 +2260,7 @@ bool eliminateDeadStoresMemorySSA(Function &F, AliasAnalysis &AA, continue; Optional Next = State.getDomMemoryDef( - KillingDef, Current, SILoc, SILocUnd, ScanLimit); + KillingDef, Current, SILoc, SILocUnd, Cache, ScanLimit); if (!Next) { LLVM_DEBUG(dbgs() << " finished walk\n");