forked from OSchip/llvm-project
[ScopBuilder] Move canonicalizeDynamicBasePtrs from ScopInfo. NFC.
Refactor the Scop and ScopBuilder classes: move canonicalizeDynamicBasePtrs and the corresponding static functions from ScopInfo.cpp to ScopBuilder.cpp.

Patch by Dominik Adamski <adamski.dominik@gmail.com>

Differential Revision: https://reviews.llvm.org/D62781

llvm-svn: 362554
parent 2d37cb82f0
commit 06c801e153
@@ -391,6 +391,34 @@ class ScopBuilder {
   /// Build the access relation of all memory accesses of @p Stmt.
   void buildAccessRelations(ScopStmt &Stmt);
 
+  /// Canonicalize arrays with base pointers from the same equivalence class.
+  ///
+  /// Some context: in our normal model we assume that each base pointer is
+  /// related to a single specific memory region, where memory regions
+  /// associated with different base pointers are disjoint. Consequently we do
+  /// not need to compute additional data dependences that model possible
+  /// overlaps of these memory regions. To verify our assumption we compute
+  /// alias checks that verify that modeled arrays indeed do not overlap. In
+  /// case an overlap is detected the runtime check fails and we fall back to
+  /// the original code.
+  ///
+  /// In case of arrays where the base pointers are known to be identical,
+  /// because they are dynamically loaded by accesses that are in the same
+  /// invariant load equivalence class, such a run-time alias check would
+  /// always be false.
+  ///
+  /// This function makes sure that we do not generate consistently failing
+  /// run-time checks for code that contains distinct arrays with known
+  /// equivalent base pointers. It identifies for each invariant load
+  /// equivalence class a single canonical array and canonicalizes all memory
+  /// accesses that reference arrays that have base pointers that are known to
+  /// be equal to the base pointer of such a canonical array to this canonical
+  /// array.
+  ///
+  /// We currently do not canonicalize arrays for which certain memory accesses
+  /// have been hoisted as loop invariant.
+  void canonicalizeDynamicBasePtrs();
+
 public:
   explicit ScopBuilder(Region *R, AssumptionCache &AC, AliasAnalysis &AA,
                        const DataLayout &DL, DominatorTree &DT, LoopInfo &LI,
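For illustration, a hypothetical input of the kind this comment describes (the struct and function names below are invented, not taken from the patch): the base pointer is itself loaded from memory, the loads of it can be hoisted as invariant loads into the same equivalence class, and without canonicalization Polly would model two distinct arrays whose run-time alias check could never succeed.

// Hedged sketch; the exact IR shape depends on earlier optimizations such as
// GVN, which may already have merged the two loads of S->Base.
struct Desc {
  double *Base; // the dynamic base pointer lives in memory
};

void scale_then_shift(struct Desc *S, int N) {
  for (int i = 0; i < N; i++)
    S->Base[i] *= 2.0; // one load of S->Base feeds this access
  for (int i = 0; i < N; i++)
    S->Base[i] += 1.0; // a second load of S->Base feeds this one
}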
@@ -2059,34 +2059,6 @@ private:
   /// Required inv. loads: LB[0], LB[1], (V, if it may alias with A or LB)
   void hoistInvariantLoads();
 
-  /// Canonicalize arrays with base pointers from the same equivalence class.
-  ///
-  /// Some context: in our normal model we assume that each base pointer is
-  /// related to a single specific memory region, where memory regions
-  /// associated with different base pointers are disjoint. Consequently we do
-  /// not need to compute additional data dependences that model possible
-  /// overlaps of these memory regions. To verify our assumption we compute
-  /// alias checks that verify that modeled arrays indeed do not overlap. In
-  /// case an overlap is detected the runtime check fails and we fall back to
-  /// the original code.
-  ///
-  /// In case of arrays where the base pointers are known to be identical,
-  /// because they are dynamically loaded by accesses that are in the same
-  /// invariant load equivalence class, such a run-time alias check would
-  /// always be false.
-  ///
-  /// This function makes sure that we do not generate consistently failing
-  /// run-time checks for code that contains distinct arrays with known
-  /// equivalent base pointers. It identifies for each invariant load
-  /// equivalence class a single canonical array and canonicalizes all memory
-  /// accesses that reference arrays that have base pointers that are known to
-  /// be equal to the base pointer of such a canonical array to this canonical
-  /// array.
-  ///
-  /// We currently do not canonicalize arrays for which certain memory accesses
-  /// have been hoisted as loop invariant.
-  void canonicalizeDynamicBasePtrs();
-
   /// Check if @p MA can always be hoisted without execution context.
   bool canAlwaysBeHoisted(MemoryAccess *MA, bool StmtInvalidCtxIsEmpty,
                           bool MAInvalidCtxIsEmpty,
@@ -1366,6 +1366,76 @@ void ScopBuilder::collectCandidateReductionLoads(
   Loads.push_back(&Stmt->getArrayAccessFor(PossibleLoad1));
 }
 
+/// Find the canonical scop array info object for a set of invariant load
+/// hoisted loads. The canonical array is the one that corresponds to the
+/// first load in the list of accesses which is used as base pointer of a
+/// scop array.
+static const ScopArrayInfo *findCanonicalArray(Scop &S,
+                                               MemoryAccessList &Accesses) {
+  for (MemoryAccess *Access : Accesses) {
+    const ScopArrayInfo *CanonicalArray = S.getScopArrayInfoOrNull(
+        Access->getAccessInstruction(), MemoryKind::Array);
+    if (CanonicalArray)
+      return CanonicalArray;
+  }
+  return nullptr;
+}
+
+/// Check if @p Array serves as base array in an invariant load.
+static bool isUsedForIndirectHoistedLoad(Scop &S, const ScopArrayInfo *Array) {
+  for (InvariantEquivClassTy &EqClass2 : S.getInvariantAccesses())
+    for (MemoryAccess *Access2 : EqClass2.InvariantAccesses)
+      if (Access2->getScopArrayInfo() == Array)
+        return true;
+  return false;
+}
+
+/// Replace the base pointer arrays in all memory accesses referencing @p Old,
+/// with a reference to @p New.
+static void replaceBasePtrArrays(Scop &S, const ScopArrayInfo *Old,
+                                 const ScopArrayInfo *New) {
+  for (ScopStmt &Stmt : S)
+    for (MemoryAccess *Access : Stmt) {
+      if (Access->getLatestScopArrayInfo() != Old)
+        continue;
+
+      isl::id Id = New->getBasePtrId();
+      isl::map Map = Access->getAccessRelation();
+      Map = Map.set_tuple_id(isl::dim::out, Id);
+      Access->setAccessRelation(Map);
+    }
+}
+
+void ScopBuilder::canonicalizeDynamicBasePtrs() {
+  for (InvariantEquivClassTy &EqClass : scop->InvariantEquivClasses) {
+    MemoryAccessList &BasePtrAccesses = EqClass.InvariantAccesses;
+
+    const ScopArrayInfo *CanonicalBasePtrSAI =
+        findCanonicalArray(*scop, BasePtrAccesses);
+
+    if (!CanonicalBasePtrSAI)
+      continue;
+
+    for (MemoryAccess *BasePtrAccess : BasePtrAccesses) {
+      const ScopArrayInfo *BasePtrSAI = scop->getScopArrayInfoOrNull(
+          BasePtrAccess->getAccessInstruction(), MemoryKind::Array);
+      if (!BasePtrSAI || BasePtrSAI == CanonicalBasePtrSAI ||
+          !BasePtrSAI->isCompatibleWith(CanonicalBasePtrSAI))
+        continue;
+
+      // We currently do not canonicalize arrays where some accesses are
+      // hoisted as invariant loads. If we did, we would need to update the
+      // access functions of the invariant loads as well. However, as this is
+      // not a very common situation, we leave this for now to avoid further
+      // complexity increases.
+      if (isUsedForIndirectHoistedLoad(*scop, BasePtrSAI))
+        continue;
+
+      replaceBasePtrArrays(*scop, BasePtrSAI, CanonicalBasePtrSAI);
+    }
+  }
+}
+
 void ScopBuilder::buildAccessRelations(ScopStmt &Stmt) {
   for (MemoryAccess *Access : Stmt.MemAccs) {
     Type *ElementType = Access->getElementType();
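To make the new control flow easier to follow outside of Polly, here is a minimal, self-contained sketch of the same idea under a simplified data model; Array, Access, and canonicalize below are invented stand-ins rather than Polly types.

// Simplified model: for each equivalence class of invariant-loaded base
// pointers, the first member that backs a modeled array becomes canonical
// (findCanonicalArray), accesses to the remaining compatible members are
// retargeted to it (replaceBasePtrArrays), and members that feed other
// hoisted loads are skipped (isUsedForIndirectHoistedLoad).
#include <cstdio>
#include <string>
#include <vector>

struct Array {
  std::string Name;
  bool UsedByHoistedLoad;
};
struct Access {
  Array *Target;
};

static void canonicalize(std::vector<std::vector<Array *>> &EquivClasses,
                         std::vector<Access> &Accesses) {
  for (auto &Class : EquivClasses) {
    if (Class.empty())
      continue;
    Array *Canonical = Class.front();
    for (Array *A : Class) {
      if (A == Canonical || A->UsedByHoistedLoad)
        continue;
      for (Access &Acc : Accesses)
        if (Acc.Target == A)
          Acc.Target = Canonical;
    }
  }
}

int main() {
  Array A{"MemRef_A", false}, B{"MemRef_B", false};
  std::vector<std::vector<Array *>> Classes = {{&A, &B}};
  std::vector<Access> Accesses = {{&A}, {&B}};
  canonicalize(Classes, Accesses);
  // Prints "MemRef_A MemRef_A": the access to MemRef_B was retargeted.
  std::printf("%s %s\n", Accesses[0].Target->Name.c_str(),
              Accesses[1].Target->Name.c_str());
  return 0;
}

In the real implementation the retargeting step rewrites the isl access relation rather than a pointer: Map.set_tuple_id(isl::dim::out, New->getBasePtrId()) retags the output tuple, e.g. turning { Stmt[i] -> MemRef_B[i] } into { Stmt[i] -> MemRef_A[i] }, while leaving the affine access function itself unchanged.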
@@ -1601,7 +1671,7 @@ void ScopBuilder::buildScop(Region &R, AssumptionCache &AC,
   }
 
   scop->hoistInvariantLoads();
-  scop->canonicalizeDynamicBasePtrs();
+  canonicalizeDynamicBasePtrs();
   verifyInvariantLoads();
   scop->simplifySCoP(true);
 
@@ -3797,76 +3797,6 @@ void Scop::hoistInvariantLoads() {
   }
 }
 
-/// Find the canonical scop array info object for a set of invariant load
-/// hoisted loads. The canonical array is the one that corresponds to the
-/// first load in the list of accesses which is used as base pointer of a
-/// scop array.
-static const ScopArrayInfo *findCanonicalArray(Scop *S,
-                                               MemoryAccessList &Accesses) {
-  for (MemoryAccess *Access : Accesses) {
-    const ScopArrayInfo *CanonicalArray = S->getScopArrayInfoOrNull(
-        Access->getAccessInstruction(), MemoryKind::Array);
-    if (CanonicalArray)
-      return CanonicalArray;
-  }
-  return nullptr;
-}
-
-/// Check if @p Array serves as base array in an invariant load.
-static bool isUsedForIndirectHoistedLoad(Scop *S, const ScopArrayInfo *Array) {
-  for (InvariantEquivClassTy &EqClass2 : S->getInvariantAccesses())
-    for (MemoryAccess *Access2 : EqClass2.InvariantAccesses)
-      if (Access2->getScopArrayInfo() == Array)
-        return true;
-  return false;
-}
-
-/// Replace the base pointer arrays in all memory accesses referencing @p Old,
-/// with a reference to @p New.
-static void replaceBasePtrArrays(Scop *S, const ScopArrayInfo *Old,
-                                 const ScopArrayInfo *New) {
-  for (ScopStmt &Stmt : *S)
-    for (MemoryAccess *Access : Stmt) {
-      if (Access->getLatestScopArrayInfo() != Old)
-        continue;
-
-      isl::id Id = New->getBasePtrId();
-      isl::map Map = Access->getAccessRelation();
-      Map = Map.set_tuple_id(isl::dim::out, Id);
-      Access->setAccessRelation(Map);
-    }
-}
-
-void Scop::canonicalizeDynamicBasePtrs() {
-  for (InvariantEquivClassTy &EqClass : InvariantEquivClasses) {
-    MemoryAccessList &BasePtrAccesses = EqClass.InvariantAccesses;
-
-    const ScopArrayInfo *CanonicalBasePtrSAI =
-        findCanonicalArray(this, BasePtrAccesses);
-
-    if (!CanonicalBasePtrSAI)
-      continue;
-
-    for (MemoryAccess *BasePtrAccess : BasePtrAccesses) {
-      const ScopArrayInfo *BasePtrSAI = getScopArrayInfoOrNull(
-          BasePtrAccess->getAccessInstruction(), MemoryKind::Array);
-      if (!BasePtrSAI || BasePtrSAI == CanonicalBasePtrSAI ||
-          !BasePtrSAI->isCompatibleWith(CanonicalBasePtrSAI))
-        continue;
-
-      // We currently do not canonicalize arrays where some accesses are
-      // hoisted as invariant loads. If we did, we would need to update the
-      // access functions of the invariant loads as well. However, as this is
-      // not a very common situation, we leave this for now to avoid further
-      // complexity increases.
-      if (isUsedForIndirectHoistedLoad(this, BasePtrSAI))
-        continue;
-
-      replaceBasePtrArrays(this, BasePtrSAI, CanonicalBasePtrSAI);
-    }
-  }
-}
-
 ScopArrayInfo *Scop::getOrCreateScopArrayInfo(Value *BasePtr, Type *ElementType,
                                               ArrayRef<const SCEV *> Sizes,
                                               MemoryKind Kind,