[DSE,MemorySSA] Cache accesses with/without reachable read-clobbers.

Currently we repeatedly check the same uses for read clobbers in some
cases. We can avoid unnecessary checks by keeping track of the memory
accesses we have already found read clobbers for. To do so, we add
memory accesses causing read clobbers to a set. Note that marking all
visited accesses as read clobbers would be too pessimistic, as that
might include accesses that are not on any path to the actual read
clobber.

If we do not find any read clobbers, we can add all visited accesses
to another set and use that to skip the same accesses in the next call.
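
To make the scheme concrete, here is a minimal standalone sketch of the
idea with simplified, hypothetical types (Access, CheckCache and
hasReadClobber are illustrative only, not the patched LLVM code):

  #include <set>
  #include <vector>

  struct Access {
    bool ReadsMemory = false;
    std::vector<Access *> Uses;
  };

  struct CheckCache {
    std::set<Access *> KnownReads;   // known to reach a read clobber
    std::set<Access *> KnownNoReads; // known to be free of read clobbers
  };

  // Returns true if any transitive use of Root reads memory.
  bool hasReadClobber(Access *Root, CheckCache &C) {
    std::vector<Access *> Worklist{Root};
    std::set<Access *> Visited;
    while (!Worklist.empty()) {
      Access *A = Worklist.back();
      Worklist.pop_back();
      if (!Visited.insert(A).second || C.KnownNoReads.count(A))
        continue; // already seen, or proven safe by an earlier walk
      if (C.KnownReads.count(A) || A->ReadsMemory) {
        // Cache only the clobbering access itself: marking everything
        // visited would be too pessimistic, as some visited accesses may
        // not lie on any path to this clobber.
        C.KnownReads.insert(A);
        return true;
      }
      Worklist.insert(Worklist.end(), A->Uses.begin(), A->Uses.end());
    }
    // A fully clean walk proves every visited access clobber-free, so
    // all of them can be skipped by later calls.
    C.KnownNoReads.insert(Visited.begin(), Visited.end());
    return false;
  }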

Reviewed By: asbirlea

Differential Revision: https://reviews.llvm.org/D75025
commit e19ef1aab5 (parent cdd339c568)
Author: Florian Hahn
Date:   2020-08-25 08:43:32 +01:00

1 file changed, 49 insertions(+), 8 deletions(-)

@@ -87,6 +87,8 @@ STATISTIC(NumModifiedStores, "Number of stores modified");
 STATISTIC(NumCFGChecks, "Number of stores modified");
 STATISTIC(NumCFGTries, "Number of stores modified");
 STATISTIC(NumCFGSuccess, "Number of stores modified");
+STATISTIC(NumDomMemDefChecks,
+          "Number of iterations checking for reads in getDomMemoryDef");
 
 DEBUG_COUNTER(MemorySSACounter, "dse-memoryssa",
               "Controls which MemoryDefs are eliminated.");
@@ -1542,6 +1544,18 @@ struct DSEState {
   /// basic block.
   DenseMap<BasicBlock *, InstOverlapIntervalsTy> IOLs;
 
+  struct CheckCache {
+    SmallPtrSet<MemoryAccess *, 16> KnownNoReads;
+    SmallPtrSet<MemoryAccess *, 16> KnownReads;
+
+    bool isKnownNoRead(MemoryAccess *A) const {
+      return KnownNoReads.find(A) != KnownNoReads.end();
+    }
+    bool isKnownRead(MemoryAccess *A) const {
+      return KnownReads.find(A) != KnownReads.end();
+    }
+  };
+
   DSEState(Function &F, AliasAnalysis &AA, MemorySSA &MSSA, DominatorTree &DT,
            PostDominatorTree &PDT, const TargetLibraryInfo &TLI)
       : F(F), AA(AA), BatchAA(AA), MSSA(MSSA), DT(DT), PDT(PDT), TLI(TLI),
@@ -1775,17 +1789,17 @@ struct DSEState {
   // MemoryDef, return None. The returned value may not (completely) overwrite
   // \p DefLoc. Currently we bail out when we encounter an aliasing MemoryUse
   // (read).
-  Optional<MemoryAccess *> getDomMemoryDef(MemoryDef *KillingDef,
-                                           MemoryAccess *Current,
-                                           MemoryLocation DefLoc,
-                                           const Value *DefUO,
-                                           unsigned &ScanLimit) {
+  Optional<MemoryAccess *>
+  getDomMemoryDef(MemoryDef *KillingDef, MemoryAccess *Current,
+                  MemoryLocation DefLoc, const Value *DefUO, CheckCache &Cache,
+                  unsigned &ScanLimit) {
     if (ScanLimit == 0) {
       LLVM_DEBUG(dbgs() << "\n ... hit scan limit\n");
       return None;
     }
 
     MemoryAccess *DomAccess;
+    MemoryAccess *StartAccess = Current;
     bool StepAgain;
     LLVM_DEBUG(dbgs() << " trying to get dominating access for " << *Current
                       << "\n");
@@ -1825,10 +1839,13 @@ struct DSEState {
     // they cover all paths from DomAccess to any function exit.
     SmallPtrSet<Instruction *, 16> KillingDefs;
     KillingDefs.insert(KillingDef->getMemoryInst());
+    Instruction *DomMemInst = isa<MemoryDef>(DomAccess)
+                                  ? cast<MemoryDef>(DomAccess)->getMemoryInst()
+                                  : nullptr;
     LLVM_DEBUG({
       dbgs() << " Checking for reads of " << *DomAccess;
-      if (isa<MemoryDef>(DomAccess))
-        dbgs() << " (" << *cast<MemoryDef>(DomAccess)->getMemoryInst() << ")\n";
+      if (DomMemInst)
+        dbgs() << " (" << *DomMemInst << ")\n";
       else
         dbgs() << ")\n";
     });
@@ -1840,6 +1857,11 @@ struct DSEState {
     };
     PushMemUses(DomAccess);
 
+    // Optimistically collect all accesses for reads. If we do not find any
+    // read clobbers, add them to the cache.
+    SmallPtrSet<MemoryAccess *, 16> KnownNoReads;
+    if (!DomMemInst || !DomMemInst->mayReadFromMemory())
+      KnownNoReads.insert(DomAccess);
     // Check if DomDef may be read.
     for (unsigned I = 0; I < WorkList.size(); I++) {
       MemoryAccess *UseAccess = WorkList[I];
@@ -1851,6 +1873,20 @@ struct DSEState {
         return None;
       }
       --ScanLimit;
+      NumDomMemDefChecks++;
+
+      // Check if we already visited this access.
+      if (Cache.isKnownNoRead(UseAccess)) {
+        LLVM_DEBUG(dbgs() << " ... skip, discovered that " << *UseAccess
+                          << " is safe earlier.\n");
+        continue;
+      }
+      if (Cache.isKnownRead(UseAccess)) {
+        LLVM_DEBUG(dbgs() << " ... bail out, discovered that " << *UseAccess
+                          << " has a read-clobber earlier.\n");
+        return None;
+      }
+      KnownNoReads.insert(UseAccess);
 
       if (isa<MemoryPhi>(UseAccess)) {
         if (any_of(KillingDefs, [this, UseAccess](Instruction *KI) {
@@ -1890,6 +1926,9 @@ struct DSEState {
      //       original MD. Stop walk.
       if (isReadClobber(DefLoc, UseInst)) {
         LLVM_DEBUG(dbgs() << " ... found read clobber\n");
+        Cache.KnownReads.insert(UseAccess);
+        Cache.KnownReads.insert(StartAccess);
+        Cache.KnownReads.insert(DomAccess);
         return None;
       }
 
@@ -2007,6 +2046,7 @@ struct DSEState {
     }
 
     // No aliasing MemoryUses of DomAccess found, DomAccess is potentially dead.
+    Cache.KnownNoReads.insert(KnownNoReads.begin(), KnownNoReads.end());
     return {DomAccess};
   }
@@ -2212,6 +2252,7 @@ bool eliminateDeadStoresMemorySSA(Function &F, AliasAnalysis &AA,
     SetVector<MemoryAccess *> ToCheck;
     ToCheck.insert(KillingDef->getDefiningAccess());
+    DSEState::CheckCache Cache;
 
     // Check if MemoryAccesses in the worklist are killed by KillingDef.
     for (unsigned I = 0; I < ToCheck.size(); I++) {
       Current = ToCheck[I];
@@ -2219,7 +2260,7 @@ bool eliminateDeadStoresMemorySSA(Function &F, AliasAnalysis &AA,
         continue;
 
       Optional<MemoryAccess *> Next = State.getDomMemoryDef(
-          KillingDef, Current, SILoc, SILocUnd, ScanLimit);
+          KillingDef, Current, SILoc, SILocUnd, Cache, ScanLimit);
 
       if (!Next) {
         LLVM_DEBUG(dbgs() << " finished walk\n");