Revert "[LVI] Normalize pointer behavior"

This reverts commit 15bc4dc9a8.

clang-cmake-x86_64-sde-avx512-linux buildbot reported quite a few
compile-time regressions in test-suite, will investigate.
Nikita Popov, 2019-11-08 18:20:43 +01:00
parent 4d0e07f786
commit 43ae5f4386
2 changed files with 91 additions and 97 deletions


@@ -151,11 +151,6 @@ namespace {
/// This is the cache kept by LazyValueInfo which
/// maintains information about queries across the clients' queries.
class LazyValueInfoCache {
public:
typedef DenseMap<PoisoningVH<BasicBlock>, SmallPtrSet<Value *, 4>>
PerBlockValueCacheTy;
private:
/// This is all of the cached block information for exactly one Value*.
/// The entries are sorted by the BasicBlock* of the
/// entries, allowing us to do a lookup with a binary search.
@@ -167,6 +162,10 @@ namespace {
SmallDenseMap<PoisoningVH<BasicBlock>, ValueLatticeElement, 4> BlockVals;
};
/// This tracks, on a per-block basis, the set of values that are
/// over-defined at the end of that block.
typedef DenseMap<PoisoningVH<BasicBlock>, SmallPtrSet<Value *, 4>>
OverDefinedCacheTy;
/// Keep track of all blocks that we have ever seen, so we
/// don't spend time removing unused blocks from our caches.
DenseSet<PoisoningVH<BasicBlock> > SeenBlocks;
@@ -174,12 +173,7 @@ namespace {
/// This is all of the cached information for all values,
/// mapped from Value* to key information.
DenseMap<Value *, std::unique_ptr<ValueCacheEntryTy>> ValueCache;
/// This tracks, on a per-block basis, the set of values that are
/// over-defined at the end of that block.
PerBlockValueCacheTy OverDefinedCache;
/// This tracks, on a per-block basis, the set of pointers that are
/// dereferenced in the block (and thus non-null at the end of the block).
PerBlockValueCacheTy DereferencedPointerCache;
OverDefinedCacheTy OverDefinedCache;
public:
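Both caches in the hunk above share the same shape: a DenseMap keyed by basic block whose values are small sets of IR values. The following is a minimal sketch of that per-block set pattern; PerBlockValueSet, PerBlockSetTy, add, contains and eraseBlock are hypothetical names, and raw BasicBlock pointers stand in for the PoisoningVH handles the real cache uses.

// Sketch only -- simplified per-block value-set cache in the spirit of the
// OverDefinedCache / DereferencedPointerCache members above.
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Value.h"
using namespace llvm;

using PerBlockSetTy = DenseMap<BasicBlock *, SmallPtrSet<Value *, 4>>;

struct PerBlockValueSet {
  PerBlockSetTy Cache;

  // Record that V has some per-block property, e.g. "overdefined at the end
  // of BB" or "dereferenced somewhere in BB".
  void add(BasicBlock *BB, Value *V) { Cache[BB].insert(V); }

  // Query the property; an absent block simply means "nothing recorded".
  bool contains(BasicBlock *BB, Value *V) const {
    auto It = Cache.find(BB);
    return It != Cache.end() && It->second.count(V);
  }

  // Drop everything attached to a block that is about to be deleted.
  void eraseBlock(BasicBlock *BB) { Cache.erase(BB); }
};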
@@ -235,17 +229,11 @@ namespace {
return BBI->second;
}
std::pair<PerBlockValueCacheTy::iterator, bool>
getOrInitDereferencedPointers(BasicBlock *BB) {
return DereferencedPointerCache.try_emplace(BB);
}
/// clear - Empty the cache.
void clear() {
SeenBlocks.clear();
ValueCache.clear();
OverDefinedCache.clear();
DereferencedPointerCache.clear();
}
/// Inform the cache that a given value has been deleted.
@@ -264,22 +252,17 @@ namespace {
};
}
static void eraseValueFromPerBlockValueCache(
Value *V, LazyValueInfoCache::PerBlockValueCacheTy &Cache) {
for (auto I = Cache.begin(), E = Cache.end(); I != E;) {
void LazyValueInfoCache::eraseValue(Value *V) {
for (auto I = OverDefinedCache.begin(), E = OverDefinedCache.end(); I != E;) {
// Copy and increment the iterator immediately so we can erase behind
// ourselves.
auto Iter = I++;
SmallPtrSetImpl<Value *> &ValueSet = Iter->second;
ValueSet.erase(V);
if (ValueSet.empty())
Cache.erase(Iter);
OverDefinedCache.erase(Iter);
}
}
void LazyValueInfoCache::eraseValue(Value *V) {
eraseValueFromPerBlockValueCache(V, OverDefinedCache);
eraseValueFromPerBlockValueCache(V, DereferencedPointerCache);
ValueCache.erase(V);
}
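The loop in eraseValue copies the iterator before advancing it: erasing the entry an iterator refers to would invalidate that iterator, so the erase is done through the copy while the loop variable has already moved on. A minimal sketch of the same idiom as a hypothetical free function (eraseFromAllBlocks is not part of the patch), again with raw pointers instead of PoisoningVH:

// Sketch only -- the copy-and-increment erase idiom from eraseValue above.
// Removes V from every per-block set and drops sets that become empty.
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Value.h"
using namespace llvm;

static void eraseFromAllBlocks(
    DenseMap<BasicBlock *, SmallPtrSet<Value *, 4>> &Cache, Value *V) {
  for (auto I = Cache.begin(), E = Cache.end(); I != E;) {
    auto Iter = I++;         // advance first, then it is safe to erase *Iter
    Iter->second.erase(V);
    if (Iter->second.empty())
      Cache.erase(Iter);     // DenseMap::erase does not invalidate I or E
  }
}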
@@ -296,8 +279,9 @@ void LazyValueInfoCache::eraseBlock(BasicBlock *BB) {
return;
SeenBlocks.erase(I);
OverDefinedCache.erase(BB);
DereferencedPointerCache.erase(BB);
auto ODI = OverDefinedCache.find(BB);
if (ODI != OverDefinedCache.end())
OverDefinedCache.erase(ODI);
for (auto &I : ValueCache)
I.second->BlockVals.erase(BB);
@@ -454,7 +438,6 @@ namespace {
BasicBlock *BB);
bool solveBlockValueExtractValue(ValueLatticeElement &BBLV,
ExtractValueInst *EVI, BasicBlock *BB);
bool isNonNullDueToDereferenceInBlock(Value *Val, BasicBlock *BB);
void intersectAssumeOrGuardBlockValueConstantRange(Value *Val,
ValueLatticeElement &BBLV,
Instruction *BBI);
@@ -636,20 +619,6 @@ bool LazyValueInfoImpl::solveBlockValue(Value *Val, BasicBlock *BB) {
bool LazyValueInfoImpl::solveBlockValueImpl(ValueLatticeElement &Res,
Value *Val, BasicBlock *BB) {
// If this value is a nonnull pointer, record its range and bail out. Note
// that for all other pointer typed values, we terminate the search at the
// definition. We could easily extend this to look through geps, bitcasts,
// and the like to prove non-nullness, but it's not clear that's worth it
// compile time wise. The context-insensitive value walk done inside
// isKnownNonZero gets most of the profitable cases at much less expense.
// This does mean that we have a sensitivity to where the defining
// instruction is placed, even if it could legally be hoisted much higher.
// That is unfortunate.
PointerType *PT = dyn_cast<PointerType>(Val->getType());
if (PT && isKnownNonZero(Val, DL)) {
Res = ValueLatticeElement::getNot(ConstantPointerNull::get(PT));
return true;
}
Instruction *BBI = dyn_cast<Instruction>(Val);
if (!BBI || BBI->getParent() != BB)
@@ -661,6 +630,20 @@ bool LazyValueInfoImpl::solveBlockValueImpl(ValueLatticeElement &Res,
if (auto *SI = dyn_cast<SelectInst>(BBI))
return solveBlockValueSelect(Res, SI, BB);
// If this value is a nonnull pointer, record its range and bail out. Note
// that for all other pointer typed values, we terminate the search at the
// definition. We could easily extend this to look through geps, bitcasts,
// and the like to prove non-nullness, but it's not clear that's worth it
// compile time wise. The context-insensitive value walk done inside
// isKnownNonZero gets most of the profitable cases at much less expense.
// This does mean that we have a sensitivity to where the defining
// instruction is placed, even if it could legally be hoisted much higher.
// That is unfortunate.
PointerType *PT = dyn_cast<PointerType>(BBI->getType());
if (PT && isKnownNonZero(BBI, DL)) {
Res = ValueLatticeElement::getNot(ConstantPointerNull::get(PT));
return true;
}
if (BBI->getType()->isIntegerTy()) {
if (auto *CI = dyn_cast<CastInst>(BBI))
return solveBlockValueCast(Res, CI, BB);
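The restored block above asks ValueTracking's isKnownNonZero once for pointer-typed defining instructions and, on success, records "not equal to null" in the lattice instead of analyzing the operand chain itself. A hedged sketch of that bailout as a standalone hypothetical helper (getNonNullLattice is not in the patch), assuming the 2019-era isKnownNonZero(Value *, DataLayout) interface used here:

// Sketch only -- isolates the two steps of the non-null bailout shown above.
#include "llvm/Analysis/ValueLattice.h"    // ValueLatticeElement
#include "llvm/Analysis/ValueTracking.h"   // isKnownNonZero (2019-era signature)
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
using namespace llvm;

static bool getNonNullLattice(Value *V, const DataLayout &DL,
                              ValueLatticeElement &Res) {
  auto *PT = dyn_cast<PointerType>(V->getType());
  if (!PT || !isKnownNonZero(V, DL))
    return false;
  // "Known non-null" is expressed as the complement of the single constant
  // null of this pointer type.
  Res = ValueLatticeElement::getNot(ConstantPointerNull::get(PT));
  return true;
}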
@@ -681,63 +664,75 @@ bool LazyValueInfoImpl::solveBlockValueImpl(ValueLatticeElement &Res,
return true;
}
static void AddDereferencedPointer(
Value *Ptr, SmallPtrSet<Value *, 4> &PtrSet, const DataLayout &DL) {
// TODO: Use NullPointerIsDefined instead.
if (Ptr->getType()->getPointerAddressSpace() == 0) {
Ptr = GetUnderlyingObject(Ptr, DL);
PtrSet.insert(Ptr);
}
}
static void AddPointersDereferencedByInstruction(
Instruction *I, SmallPtrSet<Value *, 4> &PtrSet, const DataLayout &DL) {
static bool InstructionDereferencesPointer(Instruction *I, Value *Ptr) {
if (LoadInst *L = dyn_cast<LoadInst>(I)) {
AddDereferencedPointer(L->getPointerOperand(), PtrSet, DL);
} else if (StoreInst *S = dyn_cast<StoreInst>(I)) {
AddDereferencedPointer(S->getPointerOperand(), PtrSet, DL);
} else if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
if (MI->isVolatile()) return;
return L->getPointerAddressSpace() == 0 &&
GetUnderlyingObject(L->getPointerOperand(),
L->getModule()->getDataLayout()) == Ptr;
}
if (StoreInst *S = dyn_cast<StoreInst>(I)) {
return S->getPointerAddressSpace() == 0 &&
GetUnderlyingObject(S->getPointerOperand(),
S->getModule()->getDataLayout()) == Ptr;
}
if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
if (MI->isVolatile()) return false;
// FIXME: check whether it has a valuerange that excludes zero?
ConstantInt *Len = dyn_cast<ConstantInt>(MI->getLength());
if (!Len || Len->isZero()) return;
if (!Len || Len->isZero()) return false;
AddDereferencedPointer(MI->getRawDest(), PtrSet, DL);
if (MI->getDestAddressSpace() == 0)
if (GetUnderlyingObject(MI->getRawDest(),
MI->getModule()->getDataLayout()) == Ptr)
return true;
if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(MI))
AddDereferencedPointer(MTI->getRawSource(), PtrSet, DL);
if (MTI->getSourceAddressSpace() == 0)
if (GetUnderlyingObject(MTI->getRawSource(),
MTI->getModule()->getDataLayout()) == Ptr)
return true;
}
return false;
}
bool LazyValueInfoImpl::isNonNullDueToDereferenceInBlock(
Value *Val, BasicBlock *BB) {
if (NullPointerIsDefined(BB->getParent(),
Val->getType()->getPointerAddressSpace()))
return false;
/// Return true if the allocation associated with Val is ever dereferenced
/// within the given basic block. This establishes the fact Val is not null,
/// but does not imply that the memory at Val is dereferenceable. (Val may
/// point off the end of the dereferenceable part of the object.)
static bool isObjectDereferencedInBlock(Value *Val, BasicBlock *BB) {
assert(Val->getType()->isPointerTy());
const DataLayout &DL = BB->getModule()->getDataLayout();
Val = GetUnderlyingObject(Val, DL);
LazyValueInfoCache::PerBlockValueCacheTy::iterator It;
bool NeedsInit;
std::tie(It, NeedsInit) = TheCache.getOrInitDereferencedPointers(BB);
if (NeedsInit)
Value *UnderlyingVal = GetUnderlyingObject(Val, DL);
// If 'GetUnderlyingObject' didn't converge, skip it. It won't converge
// inside InstructionDereferencesPointer either.
if (UnderlyingVal == GetUnderlyingObject(UnderlyingVal, DL, 1))
for (Instruction &I : *BB)
AddPointersDereferencedByInstruction(&I, It->second, DL);
return It->second.count(Val);
if (InstructionDereferencesPointer(&I, UnderlyingVal))
return true;
return false;
}
bool LazyValueInfoImpl::solveBlockValueNonLocal(ValueLatticeElement &BBLV,
Value *Val, BasicBlock *BB) {
Value *Val, BasicBlock *BB) {
ValueLatticeElement Result; // Start Undefined.
// If this is the entry block, we must be asking about an argument. The
// value is overdefined.
if (BB == &BB->getParent()->getEntryBlock()) {
assert(isa<Argument>(Val) && "Unknown live-in to the entry block");
BBLV = ValueLatticeElement::getOverdefined();
// Before giving up, see if we can prove the pointer non-null local to
// this particular block.
PointerType *PTy = dyn_cast<PointerType>(Val->getType());
if (PTy &&
(isKnownNonZero(Val, DL) ||
(isObjectDereferencedInBlock(Val, BB) &&
!NullPointerIsDefined(BB->getParent(), PTy->getAddressSpace())))) {
Result = ValueLatticeElement::getNot(ConstantPointerNull::get(PTy));
} else {
Result = ValueLatticeElement::getOverdefined();
}
BBLV = Result;
return true;
}
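GetUnderlyingObject only strips casts and GEPs up to a small fixed depth (six steps by default in the 2019 tree), so its result is trustworthy only when the walk reached a fixed point; the restored isObjectDereferencedInBlock checks this by taking one extra single-step walk and comparing. A hedged sketch of that convergence test as a hypothetical helper (getConvergedUnderlyingObject is not part of the patch), assuming the 2019-era GetUnderlyingObject API:

// Sketch only -- returns the underlying object only when the default-depth
// walk converged, mirroring the "didn't converge, skip it" check above.
#include "llvm/Analysis/ValueTracking.h"   // GetUnderlyingObject (2019-era API)
#include "llvm/IR/DataLayout.h"
using namespace llvm;

static Value *getConvergedUnderlyingObject(Value *V, const DataLayout &DL) {
  Value *Obj = GetUnderlyingObject(V, DL);
  // One extra single-step walk: if the result still changes, the first walk
  // was cut off by the lookup limit rather than reaching a real object.
  return Obj == GetUnderlyingObject(Obj, DL, /*MaxLookup=*/1) ? Obj : nullptr;
}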
@@ -763,6 +758,14 @@ bool LazyValueInfoImpl::solveBlockValueNonLocal(ValueLatticeElement &BBLV,
if (Result.isOverdefined()) {
LLVM_DEBUG(dbgs() << " compute BB '" << BB->getName()
<< "' - overdefined because of pred (non local).\n");
// Before giving up, see if we can prove the pointer non-null local to
// this particular block.
PointerType *PTy = dyn_cast<PointerType>(Val->getType());
if (PTy && isObjectDereferencedInBlock(Val, BB) &&
!NullPointerIsDefined(BB->getParent(), PTy->getAddressSpace())) {
Result = ValueLatticeElement::getNot(ConstantPointerNull::get(PTy));
}
BBLV = Result;
return true;
}
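When every predecessor leaves the value overdefined, the restored code above still tries to salvage a purely local non-null fact: the pointer's underlying object is dereferenced somewhere in this block, and null is not a defined address in this address space. A hedged sketch of that combined test as a hypothetical helper (refineOverdefinedPointer is not in the patch; it assumes the isObjectDereferencedInBlock helper restored earlier in this file):

// Sketch only -- packages the two conditions used above. Dereference alone is
// not enough: the non-null conclusion also requires that null be an undefined
// address in this address space.
#include "llvm/Analysis/ValueLattice.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"   // NullPointerIsDefined
using namespace llvm;

static ValueLatticeElement refineOverdefinedPointer(Value *Val, BasicBlock *BB) {
  auto *PTy = dyn_cast<PointerType>(Val->getType());
  if (PTy && isObjectDereferencedInBlock(Val, BB) &&
      !NullPointerIsDefined(BB->getParent(), PTy->getAddressSpace()))
    return ValueLatticeElement::getNot(ConstantPointerNull::get(PTy));
  return ValueLatticeElement::getOverdefined();
}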
@@ -835,24 +838,16 @@ void LazyValueInfoImpl::intersectAssumeOrGuardBlockValueConstantRange(
// If guards are not used in the module, don't spend time looking for them
auto *GuardDecl = BBI->getModule()->getFunction(
Intrinsic::getName(Intrinsic::experimental_guard));
if (GuardDecl && !GuardDecl->use_empty()) {
if (BBI->getIterator() == BBI->getParent()->begin())
return;
for (Instruction &I : make_range(std::next(BBI->getIterator().getReverse()),
BBI->getParent()->rend())) {
Value *Cond = nullptr;
if (match(&I, m_Intrinsic<Intrinsic::experimental_guard>(m_Value(Cond))))
BBLV = intersect(BBLV, getValueFromCondition(Val, Cond));
}
}
if (!GuardDecl || GuardDecl->use_empty())
return;
if (BBLV.isOverdefined()) {
// Check whether we're checking at the terminator, and the pointer has
// been dereferenced in this block.
PointerType *PTy = dyn_cast<PointerType>(Val->getType());
if (PTy && BBI->getParent()->getTerminator() == BBI &&
isNonNullDueToDereferenceInBlock(Val, BBI->getParent()))
BBLV = ValueLatticeElement::getNot(ConstantPointerNull::get(PTy));
if (BBI->getIterator() == BBI->getParent()->begin())
return;
for (Instruction &I : make_range(std::next(BBI->getIterator().getReverse()),
BBI->getParent()->rend())) {
Value *Cond = nullptr;
if (match(&I, m_Intrinsic<Intrinsic::experimental_guard>(m_Value(Cond))))
BBLV = intersect(BBLV, getValueFromCondition(Val, Cond));
}
}
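The restored form above turns the guard handling into an early return: if the module never declares or never uses llvm.experimental.guard, there is no point walking the block backwards looking for guard calls. A hedged sketch of that cheap module-level gate as a hypothetical helper (moduleUsesGuards is not part of the patch):

// Sketch only -- the declaration lookup is cheap; scanning every instruction
// for guard calls is not, so this test gates the backwards scan above.
#include "llvm/IR/Function.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Module.h"
using namespace llvm;

static bool moduleUsesGuards(const Module &M) {
  Function *GuardDecl =
      M.getFunction(Intrinsic::getName(Intrinsic::experimental_guard));
  return GuardDecl && !GuardDecl->use_empty();
}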

Second changed file: the accompanying LLVM IR regression test.

@@ -108,7 +108,7 @@ d2:
d3:
%y = load i32*, i32** %ptr
store i32 1, i32* %y
%c2 = icmp eq i32* %y, @p
%c2 = icmp eq i32* %y, null
br i1 %c2, label %ret1, label %ret2
ret1:
@@ -118,6 +118,5 @@ ret2:
ret void
}
@p = external global i32
!0 = !{}