Revert "Always treat scalar writes as MUST_WRITEs"

This reverts commit r255471.

In the post-commit review of r255471, Johannes raised the concern that PHI
writes in non-affine regions with two exiting blocks are not really
MUST_WRITEs; we only know that at least one write out of the set of all
possible PHI writes will be executed. Modeling all PHI writes as MUST_WRITEs
is probably safe, but adding the documentation needed for such a special case
is probably not worth the effort. Michael will propose a new patch that
ensures only a single PHI_WRITE is created for non-affine regions, which,
besides other benefits, should also allow us to use a single well-defined
MUST_WRITE for such PHI writes.

(This is not a full revert, but the condition and documentation have been
slightly extended)

llvm-svn: 255503
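
For illustration only (not part of the commit): a hypothetical C function in
which the data-dependent 'if' forms a non-affine region with two blocks that
each write an incoming value of the PHI for 'x' in the region's exit block.
Neither write dominates the exit, yet exactly one of them executes per
iteration, so each individual PHI write can only be modeled as a MAY_WRITE
even though their union is a must-write.

void foo(int N, float *A, float *B) {
  for (int i = 0; i < N; i++) {
    float x;
    if (A[i] > 0.0f)    /* data-dependent condition, hence non-affine */
      x = A[i] * 2.0f;  /* PHI write on the 'then' path */
    else
      x = A[i] - 1.0f;  /* PHI write on the 'else' path */
    B[i] = x;           /* PHI read in the region's exit block */
  }
}
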
Tobias Grosser 2015-12-14 15:05:37 +00:00
parent 1e6e3c60b4
commit f4f6870ff2
6 changed files with 24 additions and 16 deletions


@@ -3885,14 +3885,22 @@ void ScopInfo::addMemoryAccess(BasicBlock *BB, Instruction *Inst,
   Value *BaseAddr = BaseAddress;
   std::string BaseName = getIslCompatibleName("MemRef_", BaseAddr, "");
 
-  // The execution of a store is not guaranteed if its parent block is not
-  // guaranteed to be executed, here tested by checking whether it dominates
-  // the exit block. However, implicit writes (llvm::Value definitions or one
-  // of a PHI's incoming values) must occur in well-formed IR code.
-  bool isApproximated = (Kind == ScopArrayInfo::MK_Array) &&
-                        Stmt->isRegionStmt() &&
-                        !DT->dominates(BB, Stmt->getRegion()->getExit());
-  if (isApproximated && Type == MemoryAccess::MUST_WRITE)
+  bool isKnownMustAccess = false;
+
+  // Accesses in single-basic-block statements are always executed.
+  if (Stmt->isBlockStmt())
+    isKnownMustAccess = true;
+
+  if (Stmt->isRegionStmt()) {
+    // Accesses that dominate the exit block of a non-affine region are always
+    // executed. In non-affine regions there may exist writes that do not
+    // dominate the exit. MK_Values will always dominate the exit and MK_PHIs
+    // only if there is at most one PHI_WRITE in the non-affine region.
+    if (DT->dominates(BB, Stmt->getRegion()->getExit()))
+      isKnownMustAccess = true;
+  }
+
+  if (!isKnownMustAccess && Type == MemoryAccess::MUST_WRITE)
     Type = MemoryAccess::MAY_WRITE;
 
   AccList.emplace_back(Stmt, Inst, Type, BaseAddress, ElemBytes, Affine,
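
The net effect of the new code can be read as a small predicate. Below is a
minimal standalone sketch of that classification (not Polly code; the enum,
struct, and field names are made up for illustration, whereas the real code
queries ScopStmt and llvm::DominatorTree):

enum class AccessType { MustWrite, MayWrite, Read };

struct StmtInfo {
  bool IsBlockStmt;   // statement models a single basic block
  bool DominatesExit; // the access's block dominates the region's exit block
};

// Downgrade a MUST_WRITE to a MAY_WRITE unless the access is known to execute
// whenever its statement executes (single-block statement, or the block
// dominates the non-affine region's exit).
AccessType classify(AccessType Type, const StmtInfo &S) {
  bool IsKnownMustAccess = S.IsBlockStmt || S.DominatesExit;
  if (!IsKnownMustAccess && Type == AccessType::MustWrite)
    return AccessType::MayWrite;
  return Type;
}
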


@@ -39,7 +39,7 @@
 ; CHECK: [N] -> { Stmt_bb4__TO__bb18[i0] -> MemRef_A[i0] };
 ; CHECK: ReadAccess := [Reduction Type: NONE] [Scalar: 1]
 ; CHECK: [N] -> { Stmt_bb4__TO__bb18[i0] -> MemRef_smax[] };
-; CHECK: MustWriteAccess := [Reduction Type: NONE] [Scalar: 1]
+; CHECK: MayWriteAccess := [Reduction Type: NONE] [Scalar: 1]
 ; CHECK: [N] -> { Stmt_bb4__TO__bb18[i0] -> MemRef_j_2__phi[] };
 ; CHECK: ReadAccess := [Reduction Type: NONE] [Scalar: 1]
 ; CHECK: [N] -> { Stmt_bb4__TO__bb18[i0] -> MemRef_j_0[] };


@@ -10,9 +10,9 @@
 ; CHECK-NEXT: { Stmt_loop__TO__backedge[i0] -> [i0, 0] };
 ; CHECK-NEXT: MustWriteAccess := [Reduction Type: NONE] [Scalar: 1]
 ; CHECK-NEXT: { Stmt_loop__TO__backedge[i0] -> MemRef_merge__phi[] };
-; CHECK-NEXT: MustWriteAccess := [Reduction Type: NONE] [Scalar: 1]
+; CHECK-NEXT: MayWriteAccess := [Reduction Type: NONE] [Scalar: 1]
 ; CHECK-NEXT: { Stmt_loop__TO__backedge[i0] -> MemRef_merge__phi[] };
-; CHECK-NEXT: MustWriteAccess := [Reduction Type: NONE] [Scalar: 1]
+; CHECK-NEXT: MayWriteAccess := [Reduction Type: NONE] [Scalar: 1]
 ; CHECK-NEXT: { Stmt_loop__TO__backedge[i0] -> MemRef_merge__phi[] };
 ; CHECK-NEXT: Stmt_backedge
 ; CHECK-NEXT: Domain :=


@@ -35,7 +35,7 @@
 ; CHECK-NEXT: { Stmt_bb3__TO__bb18[i0] -> MemRef_A[i0] };
 ; CHECK-NOT: { Stmt_bb3__TO__bb18[i0] -> MemRef_x_0[] };
 ; CHECK-NOT: { Stmt_bb3__TO__bb18[i0] -> MemRef_x_1[] };
-; CHECK: MustWriteAccess := [Reduction Type: NONE] [Scalar: 1]
+; CHECK: MayWriteAccess := [Reduction Type: NONE] [Scalar: 1]
 ; CHECK-NEXT: { Stmt_bb3__TO__bb18[i0] -> MemRef_x_2__phi[] };
 ; CHECK-NOT: { Stmt_bb3__TO__bb18[i0] -> MemRef_x_0[] };
 ; CHECK-NOT: { Stmt_bb3__TO__bb18[i0] -> MemRef_x_1[] };


@@ -31,11 +31,11 @@
 ; CHECK: { Stmt_bb3__TO__bb18[i0] -> [i0, 0] };
 ; CHECK: ReadAccess := [Reduction Type: NONE] [Scalar: 0]
 ; CHECK: { Stmt_bb3__TO__bb18[i0] -> MemRef_A[i0] };
-; CHECK: MustWriteAccess := [Reduction Type: NONE] [Scalar: 1]
+; CHECK: MayWriteAccess := [Reduction Type: NONE] [Scalar: 1]
 ; CHECK: { Stmt_bb3__TO__bb18[i0] -> MemRef_x_2__phi[] };
-; CHECK: MustWriteAccess := [Reduction Type: NONE] [Scalar: 1]
+; CHECK: MayWriteAccess := [Reduction Type: NONE] [Scalar: 1]
 ; CHECK: { Stmt_bb3__TO__bb18[i0] -> MemRef_x_2__phi[] };
-; CHECK: MustWriteAccess := [Reduction Type: NONE] [Scalar: 1]
+; CHECK: MayWriteAccess := [Reduction Type: NONE] [Scalar: 1]
 ; CHECK: { Stmt_bb3__TO__bb18[i0] -> MemRef_x_2__phi[] };
 ; CHECK: MustWriteAccess := [Reduction Type: NONE] [Scalar: 1]
 ; CHECK: { Stmt_bb3__TO__bb18[i0] -> MemRef_x_2__phi[] };


@@ -39,7 +39,7 @@
 ; CHECK: { Stmt_bb2__TO__bb7[i0] -> MemRef_A[i0] };
 ; CHECK: MustWriteAccess := [Reduction Type: NONE] [Scalar: 1]
 ; CHECK: { Stmt_bb2__TO__bb7[i0] -> MemRef_x[] };
-; CHECK: MustWriteAccess := [Reduction Type: NONE] [Scalar: 1]
+; CHECK: MayWriteAccess := [Reduction Type: NONE] [Scalar: 1]
 ; CHECK: { Stmt_bb2__TO__bb7[i0] -> MemRef_y__phi[] };
 ; CHECK: MustWriteAccess := [Reduction Type: NONE] [Scalar: 1]
 ; CHECK: { Stmt_bb2__TO__bb7[i0] -> MemRef_y__phi[] };