Remove useless nesting block and dead return statement in TosaToLinalg.cpp (NFC)

Flagged by Coverity.
Mehdi Amini 2022-01-03 06:02:21 +00:00
parent a1e62aa75b
commit e4e463e747
1 changed file with 227 additions and 235 deletions

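For readers skimming the diff below, the pattern behind the Coverity finding is the usual one: a bare scope whose every path returns, so any statement placed after the scope is unreachable. The following is a minimal illustrative sketch only (the function names and bool returns are invented, not the converter's actual code); `before` shows the redundant scope plus the dead trailing return, and `after` shows the same logic once both are dropped, mirroring what this commit does to ResizeConverter in TosaToLinalg.cpp.

// Minimal sketch (illustrative names, not the real converter code).
#include <cstdio>

bool before(bool fastPath) {
  { // redundant nesting block: every path inside it returns
    if (fastPath) {
      std::puts("fast path");
      return true;
    }
    return false;
  }
  return true; // dead: control can never reach this return
}

bool after(bool fastPath) {
  // Same behavior with the scope removed and the dead return dropped.
  if (fastPath) {
    std::puts("fast path");
    return true;
  }
  return false;
}

int main() {
  std::printf("%d %d\n", before(true), after(false)); // prints "1 0" after the "fast path" line
  return 0;
}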

@@ -1431,7 +1431,6 @@ public:
         getNParallelLoopsAttrs(resultTy.getRank()));
     rewriter.replaceOp(op, genericOp.getResult(0));
-    {
     OpBuilder::InsertionGuard regionGuard(rewriter);
     rewriter.createBlock(&genericOp.region(), genericOp.region().end(),
                          TypeRange({resultElementTy}));
@@ -1440,8 +1439,8 @@ public:
     Value x = rewriter.create<linalg::IndexOp>(loc, 2);
     Value channel = rewriter.create<linalg::IndexOp>(loc, 3);
-      auto hwMin = rewriter.create<arith::ConstantOp>(
-          loc, rewriter.getI32IntegerAttr(0));
+    auto hwMin =
+        rewriter.create<arith::ConstantOp>(loc, rewriter.getI32IntegerAttr(0));
     auto hMax = rewriter.create<arith::ConstantOp>(
         loc, rewriter.getI32IntegerAttr(imageH - 1));
     auto wMax = rewriter.create<arith::ConstantOp>(
@@ -1561,10 +1560,10 @@ public:
                                       arith::CmpIPredicate::slt, rewriter);
       // Read the value from the input array.
-        iy = rewriter.create<arith::IndexCastOp>(loc, rewriter.getIndexType(),
-                                                 iy);
-        ix = rewriter.create<arith::IndexCastOp>(loc, rewriter.getIndexType(),
-                                                 ix);
+      iy =
+          rewriter.create<arith::IndexCastOp>(loc, rewriter.getIndexType(), iy);
+      ix =
+          rewriter.create<arith::IndexCastOp>(loc, rewriter.getIndexType(), ix);
       Value result = rewriter.create<tensor::ExtractOp>(
           loc, input, ValueRange{batch, iy, ix, channel});
@@ -1593,14 +1592,14 @@ public:
       x1 = clampHelper<arith::CmpIOp>(loc, x1, hwMin, wMax,
                                       arith::CmpIPredicate::slt, rewriter);
-        y0 = rewriter.create<arith::IndexCastOp>(loc, rewriter.getIndexType(),
-                                                 y0);
-        y1 = rewriter.create<arith::IndexCastOp>(loc, rewriter.getIndexType(),
-                                                 y1);
-        x0 = rewriter.create<arith::IndexCastOp>(loc, rewriter.getIndexType(),
-                                                 x0);
-        x1 = rewriter.create<arith::IndexCastOp>(loc, rewriter.getIndexType(),
-                                                 x1);
+      y0 =
+          rewriter.create<arith::IndexCastOp>(loc, rewriter.getIndexType(), y0);
+      y1 =
+          rewriter.create<arith::IndexCastOp>(loc, rewriter.getIndexType(), y1);
+      x0 =
+          rewriter.create<arith::IndexCastOp>(loc, rewriter.getIndexType(), x0);
+      x1 =
+          rewriter.create<arith::IndexCastOp>(loc, rewriter.getIndexType(), x1);
       Value y0x0 = rewriter.create<tensor::ExtractOp>(
           loc, input, ValueRange{batch, y0, x0, channel});
@@ -1628,8 +1627,7 @@ public:
         Value bottomPart = dy;
         Value topPart = rewriter.create<arith::SubFOp>(loc, oneVal, dy);
         topAcc = rewriter.create<arith::MulFOp>(loc, topAcc, topPart);
-          bottomAcc =
-              rewriter.create<arith::MulFOp>(loc, bottomAcc, bottomPart);
+        bottomAcc = rewriter.create<arith::MulFOp>(loc, bottomAcc, bottomPart);
         Value result = rewriter.create<arith::AddFOp>(loc, topAcc, bottomAcc);
         rewriter.create<linalg::YieldOp>(loc, result);
@@ -1661,18 +1659,12 @@ public:
         Value bottomPart = dy;
         Value topPart = rewriter.create<arith::SubIOp>(loc, unitVal, dy);
         topAcc = rewriter.create<arith::MulIOp>(loc, topAcc, topPart);
-          bottomAcc =
-              rewriter.create<arith::MulIOp>(loc, bottomAcc, bottomPart);
+        bottomAcc = rewriter.create<arith::MulIOp>(loc, bottomAcc, bottomPart);
         Value result = rewriter.create<arith::AddIOp>(loc, topAcc, bottomAcc);
         rewriter.create<linalg::YieldOp>(loc, result);
         return success();
       }
     return failure();
-    }
-    return success();
   }
 };