!26600 [Auto parallel] Adjusting sharding propagation

Merge pull request !26600 from Xiaoda/105-adjusting-sharding-propagation
This commit is contained in:
i-robot 2021-11-23 01:24:47 +00:00 committed by Gitee
commit 0d69920358
1 changed file with 2 additions and 2 deletions

View File

@ -526,7 +526,7 @@ int64_t ReshapeInfo::GetSWCIndexByOutputLayoutWithZeroComm(const TensorLayout &o
}
}
if (index_computation.empty()) {
-    MS_LOG(ERROR) << "There in no available strategy for zero communication cost for reshape: " << name();
+    MS_LOG(WARNING) << "There in no available strategy for zero communication cost for reshape: " << name();
return -1;
}
if (index_computation.size() > 1) {
@ -569,7 +569,7 @@ int64_t ReshapeInfo::GetSWCIndexByInputLayoutWithZeroComm(const TensorLayout &in
}
}
if (index_computation.empty()) {
-    MS_LOG(ERROR) << "There in no available strategy for zero communication cost for reshape: " << name();
+    MS_LOG(WARNING) << "There in no available strategy for zero communication cost for reshape: " << name();
return -1;
}
if (index_computation.size() > 1) {