!7608 [AutoParallel] add Dropout operator

Merge pull request !7608 from Chong/CheckParserInfo
mindspore-ci-bot 2020-10-22 14:27:52 +08:00 committed by Gitee
commit 391a1951ed
2 changed files with 9 additions and 1 deletion


@@ -416,7 +416,7 @@ Strategys PrepareStrategy(const std::shared_ptr<Graph> &graph, const std::vector
     return PrepareMatMul(graph, ops, iter_graph, iter_ops);
   } else if (type == ONEHOT) {
     return PrepareOneHot(graph, ops, iter_graph, iter_ops);
-  } else if ((type == SPARSE_SOFTMAX_CROSS_ENTROPY_WITH_LOGITS) || (type == "_VirtualDataset")) {
+  } else if ((type == SPARSE_SOFTMAX_CROSS_ENTROPY_WITH_LOGITS) || (type == "_VirtualDataset") || (type == "Dropout")) {
     return MakeDataParallelStrategy(graph, ops, iter_graph, iter_ops);
   } else {
     return MakeRecSearchStrategy(graph, ops, iter_graph, iter_ops);
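This hunk routes Dropout down the same path as _VirtualDataset and the sparse-softmax loss: rather than entering the recursive strategy search, MakeDataParallelStrategy assigns it a plain data-parallel strategy. Since Dropout's mask is applied elementwise, splitting only the batch dimension needs no cross-device communication. A minimal standalone sketch of what such a strategy amounts to, with hypothetical Shape/Strategy aliases (not MindSpore's actual types):

// Illustrative only: a data-parallel strategy partitions dim 0 by the device
// count and leaves every other dimension unsplit. Names here are hypothetical.
#include <cstdint>
#include <vector>

using Shape = std::vector<int64_t>;     // tensor shape, e.g. {32, 128}
using Strategy = std::vector<int64_t>;  // per-dimension partition counts

Strategy MakeDataParallel(const Shape &shape, int64_t device_num) {
  Strategy s(shape.size(), 1);                         // 1 = no split on this dim
  if (!shape.empty() && shape[0] % device_num == 0) {  // batch must divide evenly
    s[0] = device_num;                                 // split only the batch dim
  }
  return s;
}

For example, MakeDataParallel({32, 128}, 8) yields {8, 1}: eight batch shards, with the second dimension left whole on every device.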


@@ -53,6 +53,10 @@ Graph::NodeType MakeNewOperator(const std::vector<std::shared_ptr<OperatorInfo>>
     NewOp.apply.op_type = DictOpType.at(op_type);
   }
+  if (ops[iter_ops]->outputs_tensor_info().size() == 0) {
+    MS_LOG(EXCEPTION) << ops[iter_ops]->name() << " output tensor info is empty.";
+  }
   if (ops[iter_ops]->outputs_tensor_info()[0].shape().size() == 4) {
     NewOp.tensor_parm = MakeTensor(
       ops[iter_ops]->outputs_tensor_info()[0].shape()[0], ops[iter_ops]->outputs_tensor_info()[0].shape()[1],
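A note on the guard added above: MakeNewOperator dereferences outputs_tensor_info()[0] immediately afterwards, and operator[] on an empty std::vector is undefined behavior, so an operator registered without output tensor info would previously crash or corrupt memory instead of failing with a readable message. A standalone sketch of the same check, using a plain exception in place of MS_LOG(EXCEPTION) (names are hypothetical):

// Illustrative only: validate before indexing, so an empty vector produces a
// loud, descriptive failure rather than undefined behavior.
#include <cstdint>
#include <stdexcept>
#include <vector>

using TensorShape = std::vector<int64_t>;

const TensorShape &FirstOutputShape(const std::vector<TensorShape> &outputs) {
  if (outputs.empty()) {
    throw std::runtime_error("output tensor info is empty.");  // mirrors MS_LOG(EXCEPTION)
  }
  return outputs[0];  // safe: non-emptiness was just checked
}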
@@ -74,6 +78,10 @@ Graph::NodeType MakeNewOperator(const std::vector<std::shared_ptr<OperatorInfo>>
 OperatorRec CompleteOperatorInputs(const std::vector<std::shared_ptr<OperatorInfo>> &ops, const size_t iter_ops,
                                    Graph::NodeType NewTensor) {
+  if (ops[iter_ops]->inputs_tensor_info().size() > MAX_INPUT_NUM) {
+    MS_LOG(EXCEPTION) << ops[iter_ops]->name() << " input tensor num exceeds limit.";
+  }
   for (size_t iter_input_tensors = 0; iter_input_tensors < ops[iter_ops]->inputs_tensor_info().size();
        iter_input_tensors++) {
     if (ops[iter_ops]->inputs_tensor_info()[iter_input_tensors].shape().size() == 4) {
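The second guard bounds the loop that follows it: CompleteOperatorInputs walks every input tensor and, for rank-4 shapes as in the visible context, forwards the four dimensions into the node's tensor parameters; the elided branches handle lower ranks. A sketch of that pattern, under the assumption that lower-rank shapes are right-align padded to 4-D (kMaxInputNum and PadTo4D are illustrative stand-ins, not MindSpore's definitions):

// Illustrative only: cap the input count, then pad each shape to 4-D so
// downstream bookkeeping can treat all tensors uniformly.
#include <array>
#include <cstddef>
#include <cstdint>
#include <stdexcept>
#include <vector>

constexpr size_t kMaxInputNum = 20;  // assumed limit; the real MAX_INPUT_NUM may differ

void CheckInputCount(size_t num_inputs) {
  if (num_inputs > kMaxInputNum) {
    throw std::runtime_error("input tensor num exceeds limit.");  // mirrors the guard above
  }
}

std::array<int64_t, 4> PadTo4D(const std::vector<int64_t> &shape) {
  if (shape.size() > 4) {
    throw std::runtime_error("input tensor rank exceeds 4.");
  }
  std::array<int64_t, 4> padded{1, 1, 1, 1};
  for (size_t i = 0; i < shape.size(); ++i) {
    padded[4 - shape.size() + i] = shape[i];  // {32, 128} -> {1, 1, 32, 128}
  }
  return padded;
}

Under that assumption, a 2-D tensor such as {32, 128} flows through the same 4-D handling as a convolution feature map, which keeps the strategy-generation code to a single shape layout.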