//===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation --*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements an analysis that determines, for a given memory
// operation, what preceding memory operations it depends on.  It builds on
// alias analysis information, and tries to provide a lazy, caching interface
// to a common kind of alias information query.
//
//===----------------------------------------------------------------------===//
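
// Client usage sketch (illustrative only; "MyPass" is a hypothetical client
// pass, not code from this file): a transformation that wants these results
// requests this pass in its own getAnalysisUsage and queries it later, e.g.
//
//   void MyPass::getAnalysisUsage(AnalysisUsage &AU) const {
//     AU.addRequired<MemoryDependenceAnalysis>();
//   }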

#define DEBUG_TYPE "memdep"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Constants.h"
#include "llvm/Instructions.h"
#include "llvm/Function.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/CFG.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Target/TargetData.h"
using namespace llvm;

STATISTIC(NumCacheNonLocal, "Number of cached non-local responses");
STATISTIC(NumUncacheNonLocal, "Number of uncached non-local responses");

char MemoryDependenceAnalysis::ID = 0;

// Register this pass...
static RegisterPass<MemoryDependenceAnalysis> X("memdep",
                                     "Memory Dependence Analysis", false, true);

/// getAnalysisUsage - Does not modify anything.  It uses Alias Analysis.
///
void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequiredTransitive<AliasAnalysis>();
  AU.addRequiredTransitive<TargetData>();
}

/// getCallSiteDependency - Private helper for finding the local dependencies
/// of a call site.
MemoryDependenceAnalysis::DepResultTy MemoryDependenceAnalysis::
getCallSiteDependency(CallSite C, BasicBlock::iterator ScanIt,
                      BasicBlock *BB) {
  AliasAnalysis &AA = getAnalysis<AliasAnalysis>();
  TargetData &TD = getAnalysis<TargetData>();

  // Walk backwards through the block, looking for dependencies.
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // If this inst is a memory op, get the pointer it accessed.
    Value *Pointer = 0;
    uint64_t PointerSize = 0;
    if (StoreInst *S = dyn_cast<StoreInst>(Inst)) {
      Pointer = S->getPointerOperand();
      PointerSize = TD.getTypeStoreSize(S->getOperand(0)->getType());
    } else if (AllocationInst *AI = dyn_cast<AllocationInst>(Inst)) {
      Pointer = AI;
      if (ConstantInt *C = dyn_cast<ConstantInt>(AI->getArraySize()))
        // Use ABI size (size between elements), not store size (size of one
        // element without padding).
        PointerSize = C->getZExtValue() *
                      TD.getABITypeSize(AI->getAllocatedType());
      else
        PointerSize = ~0UL;
    } else if (VAArgInst *V = dyn_cast<VAArgInst>(Inst)) {
      Pointer = V->getOperand(0);
      PointerSize = TD.getTypeStoreSize(V->getType());
    } else if (FreeInst *F = dyn_cast<FreeInst>(Inst)) {
      Pointer = F->getPointerOperand();

      // FreeInsts erase the entire structure.
      PointerSize = ~0UL;
    } else if (isa<CallInst>(Inst) || isa<InvokeInst>(Inst)) {
      if (AA.getModRefBehavior(CallSite::get(Inst)) ==
            AliasAnalysis::DoesNotAccessMemory)
        continue;
      return DepResultTy(Inst, Normal);
    } else
      continue;

    if (AA.getModRefInfo(C, Pointer, PointerSize) != AliasAnalysis::NoModRef)
      return DepResultTy(Inst, Normal);
  }

  // No dependence found.
  return DepResultTy(0, NonLocal);
}

/// getDependencyFromInternal - Return the instruction on which the given
/// memory operation depends, scanning backwards from ScanIt within the basic
/// block BB.
MemoryDependenceAnalysis::DepResultTy MemoryDependenceAnalysis::
getDependencyFromInternal(Instruction *QueryInst, BasicBlock::iterator ScanIt,
                          BasicBlock *BB) {
  AliasAnalysis &AA = getAnalysis<AliasAnalysis>();
  TargetData &TD = getAnalysis<TargetData>();

  // Get the pointer value for which dependence will be determined.
  Value *MemPtr = 0;
  uint64_t MemSize = 0;
  bool MemVolatile = false;

  if (StoreInst *S = dyn_cast<StoreInst>(QueryInst)) {
    MemPtr = S->getPointerOperand();
    MemSize = TD.getTypeStoreSize(S->getOperand(0)->getType());
    MemVolatile = S->isVolatile();
  } else if (LoadInst *L = dyn_cast<LoadInst>(QueryInst)) {
    MemPtr = L->getPointerOperand();
    MemSize = TD.getTypeStoreSize(L->getType());
    MemVolatile = L->isVolatile();
  } else if (VAArgInst *V = dyn_cast<VAArgInst>(QueryInst)) {
    MemPtr = V->getOperand(0);
    MemSize = TD.getTypeStoreSize(V->getType());
  } else if (FreeInst *F = dyn_cast<FreeInst>(QueryInst)) {
    MemPtr = F->getPointerOperand();
    // FreeInsts erase the entire structure, not just a field.
    MemSize = ~0UL;
  } else if (isa<CallInst>(QueryInst) || isa<InvokeInst>(QueryInst))
    return getCallSiteDependency(CallSite::get(QueryInst), ScanIt, BB);
  else // Non-memory instructions depend on nothing.
    return DepResultTy(0, None);

  // Walk backwards through the basic block, looking for dependencies.
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // If the query access is volatile and this is a volatile load/store,
    // return a dependence.
    if (MemVolatile &&
        ((isa<LoadInst>(Inst) && cast<LoadInst>(Inst)->isVolatile()) ||
         (isa<StoreInst>(Inst) && cast<StoreInst>(Inst)->isVolatile())))
      return DepResultTy(Inst, Normal);

    // MemDep is broken w.r.t. loads: it says that two loads of the same
    // pointer depend on each other. :(
    if (LoadInst *L = dyn_cast<LoadInst>(Inst)) {
      Value *Pointer = L->getPointerOperand();
      uint64_t PointerSize = TD.getTypeStoreSize(L->getType());

      // If we found a pointer, check if it could be the same as our pointer.
      AliasAnalysis::AliasResult R =
        AA.alias(Pointer, PointerSize, MemPtr, MemSize);

      if (R == AliasAnalysis::NoAlias)
        continue;

      // A load does not depend on another load that merely may-aliases it;
      // only a must-alias (or an intervening write) produces a dependence.
      if (isa<LoadInst>(QueryInst) && R == AliasAnalysis::MayAlias)
        continue;
      return DepResultTy(Inst, Normal);
    }

    // If this is an allocation, and if we know that the accessed pointer is to
    // the allocation, return None.  This means that there is no dependence and
    // the access can be optimized based on that.  For example, a load could
    // turn into undef.
    if (AllocationInst *AI = dyn_cast<AllocationInst>(Inst)) {
      Value *AccessPtr = MemPtr->getUnderlyingObject();

      if (AccessPtr == AI ||
          AA.alias(AI, 1, AccessPtr, 1) == AliasAnalysis::MustAlias)
        return DepResultTy(0, None);
      continue;
    }

    // See if this instruction mod/refs the pointer.
    AliasAnalysis::ModRefResult MRR = AA.getModRefInfo(Inst, MemPtr, MemSize);

    if (MRR == AliasAnalysis::NoModRef)
      continue;

    // Loads don't depend on read-only instructions.
    if (isa<LoadInst>(QueryInst) && MRR == AliasAnalysis::Ref)
      continue;

    // Otherwise, there is a dependence.
    return DepResultTy(Inst, Normal);
  }

  // If we found nothing, return the non-local flag.
  return DepResultTy(0, NonLocal);
}
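
// Illustrative (hypothetical) client-side sketch for the local query below;
// SomeLoadOrStore is a placeholder and the surrounding code depends on the
// client pass:
//
//   MemoryDependenceAnalysis &MD = getAnalysis<MemoryDependenceAnalysis>();
//   MemDepResult Dep = MD.getDependency(SomeLoadOrStore);
//   if (Dep.isNonLocal()) {
//     // The answer lives in a predecessor block; see getNonLocalDependency.
//   }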

/// getDependency - Return the instruction on which a memory operation
/// depends.
MemDepResult MemoryDependenceAnalysis::getDependency(Instruction *QueryInst) {
  Instruction *ScanPos = QueryInst;

  // Check for a cached result.
  DepResultTy &LocalCache = LocalDeps[QueryInst];

  // If the cached entry is non-dirty, just return it.  Note that this depends
  // on DepResultTy's default constructing to 'dirty'.
  if (LocalCache.getInt() != Dirty)
    return ConvToResult(LocalCache);

  // Otherwise, if we have a dirty entry, we know we can start the scan at that
  // instruction, which may save us some work.
  if (Instruction *Inst = LocalCache.getPointer())
    ScanPos = Inst;

  // Do the scan.
  LocalCache = getDependencyFromInternal(QueryInst, ScanPos,
                                         QueryInst->getParent());

  // Remember the result!
  if (Instruction *I = LocalCache.getPointer())
    ReverseLocalDeps[I].insert(QueryInst);

  return ConvToResult(LocalCache);
}
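
// Illustrative (hypothetical) continuation of the sketch above for the
// non-local query implemented below; the result vector receives one entry per
// block that was scanned:
//
//   SmallVector<std::pair<BasicBlock*, MemDepResult>, 16> Deps;
//   MD.getNonLocalDependency(SomeLoadOrStore, Deps);
//   for (unsigned i = 0, e = Deps.size(); i != e; ++i) {
//     // Deps[i].first is a block, Deps[i].second the dependence within it.
//   }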

/// getNonLocalDependency - Perform a full dependency query for the
/// specified instruction, returning the set of blocks that the value is
/// potentially live across.  The returned set of results will include a
/// "NonLocal" result for every block the value is live across but has no
/// dependence in.
///
/// This method assumes the instruction returns a "nonlocal" dependency
/// within its own block.
///
void MemoryDependenceAnalysis::
getNonLocalDependency(Instruction *QueryInst,
                      SmallVectorImpl<std::pair<BasicBlock*,
                                                MemDepResult> > &Result) {
  assert(getDependency(QueryInst).isNonLocal() &&
     "getNonLocalDependency should only be used on insts with non-local deps!");
  DenseMap<BasicBlock*, DepResultTy> &Cache = NonLocalDeps[QueryInst];

  /// DirtyBlocks - This is the set of blocks that need to be recomputed.  In
  /// the cached case, this can happen due to instructions being deleted etc.
  /// In the uncached case, this starts out as the set of predecessors we care
  /// about.
  SmallVector<BasicBlock*, 32> DirtyBlocks;

  if (!Cache.empty()) {
    // If we already have a partially computed set of results, scan them to
    // determine what is dirty, seeding our initial DirtyBlocks worklist.
    // FIXME: In the "don't need to be updated" case, this is expensive; why
    // not have a per-cache flag saying it is undirty?
    for (DenseMap<BasicBlock*, DepResultTy>::iterator I = Cache.begin(),
         E = Cache.end(); I != E; ++I)
      if (I->second.getInt() == Dirty)
        DirtyBlocks.push_back(I->first);

    NumCacheNonLocal++;

    //cerr << "CACHED CASE: " << DirtyBlocks.size() << " dirty: "
    //     << Cache.size() << " cached: " << *QueryInst;
  } else {
    // Seed DirtyBlocks with each of the preds of QueryInst's block.
    BasicBlock *QueryBB = QueryInst->getParent();
    DirtyBlocks.append(pred_begin(QueryBB), pred_end(QueryBB));
    NumUncacheNonLocal++;
  }

  // Iterate while we still have blocks to update.
  while (!DirtyBlocks.empty()) {
    BasicBlock *DirtyBB = DirtyBlocks.back();
    DirtyBlocks.pop_back();

    // Get the entry for this block.  Note that this relies on DepResultTy
    // default initializing to Dirty.
    DepResultTy &DirtyBBEntry = Cache[DirtyBB];

    // If DirtyBBEntry isn't dirty, it ended up on the worklist multiple times.
    if (DirtyBBEntry.getInt() != Dirty) continue;

    // If the dirty entry has a pointer, start scanning from it so we don't
    // have to rescan the entire block.
    BasicBlock::iterator ScanPos = DirtyBB->end();
    if (Instruction *Inst = DirtyBBEntry.getPointer())
      ScanPos = Inst;

    // Find out if this block has a local dependency for QueryInst.
    DirtyBBEntry = getDependencyFromInternal(QueryInst, ScanPos, DirtyBB);

    // If the block has a dependency (i.e. it isn't completely transparent to
    // the value), remember it!
    if (DirtyBBEntry.getInt() != NonLocal) {
      // Keep the ReverseNonLocalDeps map up to date so we can efficiently
      // update this when we remove instructions.
      if (Instruction *Inst = DirtyBBEntry.getPointer())
        ReverseNonLocalDeps[Inst].insert(QueryInst);
      continue;
    }

    // If the block *is* completely transparent to the load, we need to check
    // the predecessors of this block.  Add them to our worklist.
    DirtyBlocks.append(pred_begin(DirtyBB), pred_end(DirtyBB));
  }

  // Copy the result into the output set.
  for (DenseMap<BasicBlock*, DepResultTy>::iterator I = Cache.begin(),
       E = Cache.end(); I != E; ++I)
    Result.push_back(std::make_pair(I->first, ConvToResult(I->second)));
}
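
// Hypothetical client-side sketch (not code from this file): a transform that
// deletes an instruction is expected to notify this analysis first so the
// caches stay coherent, e.g.
//
//   MD.removeInstruction(DeadInst);
//   DeadInst->eraseFromParent();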

/// removeInstruction - Remove an instruction from the dependence analysis,
/// updating the dependence of instructions that previously depended on it.
/// This method attempts to keep the cache coherent using the reverse map.
void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
  // Walk through the Non-local dependencies, removing this one as the value
  // for any cached queries.
  for (DenseMap<BasicBlock*, DepResultTy>::iterator DI =
       NonLocalDeps[RemInst].begin(), DE = NonLocalDeps[RemInst].end();
       DI != DE; ++DI)
    if (Instruction *Inst = DI->second.getPointer())
      ReverseNonLocalDeps[Inst].erase(RemInst);

  // If we have a cached local dependence query for this instruction, remove it.
  LocalDepMapType::iterator LocalDepEntry = LocalDeps.find(RemInst);
  if (LocalDepEntry != LocalDeps.end()) {
    // Remove us from DepInst's reverse set now that the local dep info is gone.
    if (Instruction *Inst = LocalDepEntry->second.getPointer()) {
      SmallPtrSet<Instruction*, 4> &RLD = ReverseLocalDeps[Inst];
      RLD.erase(RemInst);
      if (RLD.empty())
        ReverseLocalDeps.erase(Inst);
    }

    // Remove this local dependency info.
    LocalDeps.erase(LocalDepEntry);
  }

  // Loop over all of the things that depend on the instruction we're removing.
  SmallVector<std::pair<Instruction*, Instruction*>, 8> ReverseDepsToAdd;

  ReverseDepMapType::iterator ReverseDepIt = ReverseLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &ReverseDeps = ReverseDepIt->second;
    // RemInst can't be the terminator if it has stuff depending on it.
    assert(!ReverseDeps.empty() && !isa<TerminatorInst>(RemInst) &&
           "Nothing can locally depend on a terminator");

    // Anything that was locally dependent on RemInst is now going to be
    // dependent on the instruction after RemInst.  It will have the dirty flag
    // set so it will rescan.  This saves having to scan the entire block to
    // get to this point.
    Instruction *NewDepInst = next(BasicBlock::iterator(RemInst));

    for (SmallPtrSet<Instruction*, 4>::iterator I = ReverseDeps.begin(),
         E = ReverseDeps.end(); I != E; ++I) {
      Instruction *InstDependingOnRemInst = *I;

      // If we thought the instruction depended on itself (possible for
      // unconfirmed dependencies) ignore the update.
      if (InstDependingOnRemInst == RemInst) continue;

      LocalDeps[InstDependingOnRemInst] = DepResultTy(NewDepInst, Dirty);

      // Make sure to remember that new things depend on NewDepInst.
      ReverseDepsToAdd.push_back(std::make_pair(NewDepInst,
                                                InstDependingOnRemInst));
    }

    ReverseLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating the
    // 'ReverseDeps' reference.
    while (!ReverseDepsToAdd.empty()) {
      ReverseLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  ReverseDepIt = ReverseNonLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseNonLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &Set = ReverseDepIt->second;
    for (SmallPtrSet<Instruction*, 4>::iterator I = Set.begin(), E = Set.end();
         I != E; ++I)
      for (DenseMap<BasicBlock*, DepResultTy>::iterator
           DI = NonLocalDeps[*I].begin(), DE = NonLocalDeps[*I].end();
           DI != DE; ++DI)
        if (DI->second.getPointer() == RemInst) {
          // Convert to a dirty entry for the subsequent instruction.
          DI->second.setInt(Dirty);
          if (RemInst->isTerminator())
            DI->second.setPointer(0);
          else {
            Instruction *NextI = next(BasicBlock::iterator(RemInst));
            DI->second.setPointer(NextI);
            assert(NextI != RemInst);
            ReverseDepsToAdd.push_back(std::make_pair(NextI, *I));
          }
        }

    ReverseNonLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating 'Set'.
    while (!ReverseDepsToAdd.empty()) {
      ReverseNonLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  NonLocalDeps.erase(RemInst);
  getAnalysis<AliasAnalysis>().deleteValue(RemInst);
  DEBUG(verifyRemoved(RemInst));
}

/// verifyRemoved - Verify that the specified instruction does not occur
/// in our internal data structures.
void MemoryDependenceAnalysis::verifyRemoved(Instruction *D) const {
  for (LocalDepMapType::const_iterator I = LocalDeps.begin(),
       E = LocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    assert(I->second.getPointer() != D &&
           "Inst occurs in data structures");
  }

  for (NonLocalDepMapType::const_iterator I = NonLocalDeps.begin(),
       E = NonLocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    for (DenseMap<BasicBlock*, DepResultTy>::iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(II->second.getPointer() != D && "Inst occurs in data structures");
  }

  for (ReverseDepMapType::const_iterator I = ReverseLocalDeps.begin(),
       E = ReverseLocalDeps.end(); I != E; ++I)
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");

  for (ReverseDepMapType::const_iterator I = ReverseNonLocalDeps.begin(),
       E = ReverseNonLocalDeps.end(); I != E; ++I)
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");
}