!15585 fix code check

From: @huangbingjian
Reviewed-by: @ginfung, @zh_qh
Signed-off-by: @zh_qh
mindspore-ci-bot 2021-04-25 14:05:39 +08:00 committed by Gitee
commit f85b7d56d1
4 changed files with 6 additions and 6 deletions


@@ -880,7 +880,7 @@ static std::vector<std::pair<CNodePtr, CNodePtr>> FindPrimalJPair(const FuncGrap
} else if (IsPrimitive(cnode->inputs().at(0), prim::kPrimJ)) {
// To find J user.
auto j_user = GetJUser(node_user_map, cnode, index);
- primal_j_pair.push_back({nullptr, j_user});
+ primal_j_pair.emplace_back(std::pair<CNodePtr, CNodePtr>(nullptr, j_user));
}
}
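Note: the switch from push_back with a braced initializer to emplace_back with an explicitly typed std::pair is presumably what the code check asked for; spelling out the element type avoids the braced-init-list argument that such checkers flag. A minimal standalone sketch of the two forms (illustrative only, not MindSpore code):

#include <memory>
#include <utility>
#include <vector>

struct Node {};
using NodePtr = std::shared_ptr<Node>;

int main() {
  std::vector<std::pair<NodePtr, NodePtr>> primal_j_pair;
  NodePtr j_user = std::make_shared<Node>();
  // Braced-init-list form that the checker rejects:
  // primal_j_pair.push_back({nullptr, j_user});
  // Explicitly typed form that passes the check:
  primal_j_pair.emplace_back(std::pair<NodePtr, NodePtr>(nullptr, j_user));
  return 0;
}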
@@ -985,7 +985,7 @@ void DFunctor::EliminatePrimalGraph() {
idx0->set_abstract(std::make_shared<abstract::AbstractScalar>(imm0));
auto getitem0 = construct_wrapper->NewCNode({tuple_getitem, primal_user, idx0});
getitem0->set_abstract(primal_abs);
- manager->Replace(primal_user, getitem0);
+ (void)manager->Replace(primal_user, getitem0);
}
}
} // namespace ad
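Note: casting the discarded return value of manager->Replace to (void) is the usual way to tell a checker that ignoring a return value is intentional. A minimal sketch of the pattern, assuming the callee is declared so that its result must normally be used (Replace here is a hypothetical stand-in, not the MindSpore API):

#include <cstdio>

// Hypothetical stand-in for a call whose result the checker wants handled.
[[nodiscard]] bool Replace(int from, int to) {
  std::printf("replace %d with %d\n", from, to);
  return true;
}

int main() {
  (void)Replace(1, 2);  // Explicitly discard the result; silences the unused-result warning.
  return 0;
}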


@@ -131,7 +131,7 @@ std::string ConvertToString(uint32_t *input, const int &size) {
oss << std::hex;
for (int i = 0; i < size; ++i) {
for (int j = static_cast<int>(sizeof(uint32_t) / sizeof(uint8_t)) - 1; j >= 0; --j) {
- uint8_t val = static_cast<uint8_t>((input[i] >> static_cast<uint32_t>(j * kBitNumber)) & 0xff);
+ auto val = static_cast<uint8_t>((input[i] >> static_cast<uint32_t>(j * kBitNumber)) & 0xff);
oss << std::setw(2) << std::setfill('0') << static_cast<unsigned int>(val);
}
}
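Note: using auto on the left-hand side when the initializer is already a static_cast<uint8_t>(...) removes the duplicated type name, which is a common readability rule in these checks. A standalone sketch of the same hex-dump loop (values and setup are illustrative only):

#include <cstdint>
#include <iomanip>
#include <iostream>
#include <sstream>

int main() {
  constexpr int kBitNumber = 8;  // bits per byte, mirroring the constant used above
  uint32_t input = 0xDEADBEEF;
  std::ostringstream oss;
  oss << std::hex;
  for (int j = static_cast<int>(sizeof(uint32_t) / sizeof(uint8_t)) - 1; j >= 0; --j) {
    // The cast already names the type, so auto avoids writing uint8_t twice.
    auto val = static_cast<uint8_t>((input >> static_cast<uint32_t>(j * kBitNumber)) & 0xff);
    oss << std::setw(2) << std::setfill('0') << static_cast<unsigned int>(val);
  }
  std::cout << oss.str() << std::endl;  // prints deadbeef
  return 0;
}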


@@ -387,12 +387,12 @@ class TensorDataImpl : public TensorData {
}
// Replace # with placeholder.
size_t index = str.find('#');
- while (index != str.npos) {
+ while (index != std::string::npos) {
size_t pos = index;
while (str[pos] == '#') {
pos++;
}
- int len = pos - index;
+ size_t len = pos - index;
std::string space(max_width - len, ' ');
str = str.replace(index, len, space);
index = str.find('#', index);
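Note: the two edits in this hunk look like type cleanups: comparing against std::string::npos names the constant through the type rather than through the object, and making len a size_t keeps the pos - index subtraction unsigned, avoiding the implicit signed/unsigned conversion the checker flags. A small self-contained sketch of the same placeholder-replacement loop (input values are made up for illustration):

#include <iostream>
#include <string>

int main() {
  std::string str = "ab##cd#e";
  const size_t max_width = 4;
  size_t index = str.find('#');
  while (index != std::string::npos) {
    size_t pos = index;
    while (pos < str.size() && str[pos] == '#') {
      pos++;
    }
    size_t len = pos - index;  // size_t, so no signed/unsigned mix in the arithmetic below
    std::string space(max_width - len, ' ');
    str = str.replace(index, len, space);
    index = str.find('#', index);
  }
  std::cout << "[" << str << "]" << std::endl;  // prints [ab  cd   e]
  return 0;
}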


@@ -133,7 +133,7 @@ class GPT2FinetuneCell(nn.Cell):
init = self.alloc_status()
init = F.depend(init, loss)
clear_before_grad = self.clear_before_grad(init)
- self.depend_parameter_use(clear_before_grad, scaling_sens)
+ scaling_sens = F.depend(scaling_sens, clear_before_grad)
grads = self.grad(self.network, weights)(input_ids,
input_mask,
label_ids,
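Note on the Python change in GPT2FinetuneCell: the old call self.depend_parameter_use(clear_before_grad, scaling_sens) discarded its result, so the intended dependency could be optimized away. F.depend returns its first argument with a control dependency on the second, so rebinding scaling_sens = F.depend(scaling_sens, clear_before_grad) appears to be what actually guarantees the overflow-status buffer is cleared before scaling_sens is consumed by the gradient computation below.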