Fix the ConfusionSoftmaxGrad fusion pass so it works correctly when the ReduceSum's keep_dims attribute is set to False

This commit is contained in:
huanghui 2020-06-19 10:24:32 +08:00
parent 1424178601
commit c7bda5363d
3 changed files with 26 additions and 38 deletions

View File

@ -25,29 +25,8 @@
namespace mindspore { namespace mindspore {
namespace opt { namespace opt {
namespace {
// Copies the matched ReduceSum's "axis" and "keep_dims" attributes onto the
// fused ConfusionSoftmaxGrad node so the fused op performs the same reduction.
// sub_anf:     the Sub CNode at the root of the matched pattern; input(2) is
//              expected to be the ReduceSum CNode (input(0) is the primitive).
// fusion_node: the newly created ConfusionSoftmaxGrad CNode that receives
//              the copied attributes.
// Raises an exception (MS_LOG(EXCEPTION)) if Sub has an unexpected input
// count or its second data input is not a CNode.
void SetAttrsForFusionNode(const AnfNodePtr &sub_anf, const AnfNodePtr &fusion_node) {
MS_EXCEPTION_IF_NULL(sub_anf);
MS_EXCEPTION_IF_NULL(fusion_node);
auto sub = sub_anf->cast<CNodePtr>();
MS_EXCEPTION_IF_NULL(sub);
// CNode size includes the primitive input; the error text implies
// kSubInputNum == 3 (primitive + Sub's two operands) — TODO confirm.
if (sub->size() != kSubInputNum) {
MS_LOG(EXCEPTION) << "Sub's size is not equal with 3";
}
// input(2) is Sub's second operand, i.e. the output of ReduceSum.
auto reduce_sum_anf = sub->input(2);
MS_EXCEPTION_IF_NULL(reduce_sum_anf);
auto reduce_sum = reduce_sum_anf->cast<CNodePtr>();
if (reduce_sum == nullptr) {
MS_LOG(EXCEPTION) << "Sub's second input is not a cnode";
}
// Forward the reduction attributes so the fused kernel reduces identically.
AnfAlgo::CopyNodeAttr(kAttrAxis, reduce_sum, fusion_node);
AnfAlgo::CopyNodeAttr(kAttrKeepDims, reduce_sum, fusion_node);
}
} // namespace
const BaseRef ConfusionSoftmaxGradRule::DefinePattern() const { const BaseRef ConfusionSoftmaxGradRule::DefinePattern() const {
return VectorRef( return VectorRef({prim::kPrimSub, input0_, VectorRef({reduce_sum_, VectorRef({prim::kPrimMul, input1_, input0_})})});
{prim::kPrimSub, input0_, VectorRef({prim::kPrimReduceSum, VectorRef({prim::kPrimMul, input1_, input0_})})});
} }
const AnfNodePtr ConfusionSoftmaxGradRule::Process(const FuncGraphPtr &graph, const AnfNodePtr &node, const AnfNodePtr ConfusionSoftmaxGradRule::Process(const FuncGraphPtr &graph, const AnfNodePtr &node,
@ -55,22 +34,28 @@ const AnfNodePtr ConfusionSoftmaxGradRule::Process(const FuncGraphPtr &graph, co
MS_EXCEPTION_IF_NULL(graph); MS_EXCEPTION_IF_NULL(graph);
MS_EXCEPTION_IF_NULL(node); MS_EXCEPTION_IF_NULL(node);
MS_EXCEPTION_IF_NULL(equiv); MS_EXCEPTION_IF_NULL(equiv);
auto input0 = utils::cast<AnfNodePtr>((*equiv)[input0_]); AnfNodePtr input0 = GetAnfNodeByVar(equiv, input0_);
auto input1 = utils::cast<AnfNodePtr>((*equiv)[input1_]); AnfNodePtr input1 = GetAnfNodeByVar(equiv, input1_);
MS_EXCEPTION_IF_NULL(input0); AnfNodePtr sum_anf = GetAnfNodeByVar(equiv, reduce_sum_);
MS_EXCEPTION_IF_NULL(input1); if (sum_anf == nullptr || !sum_anf->isa<CNode>()) {
MS_LOG(WARNING) << "Matched ReduceSum is not a CNode!";
return nullptr;
}
if (!GetBoolAttr(sum_anf, kAttrKeepDims)) {
MS_LOG(INFO) << "ReduceSum's attr keep_dims should be true if do fusion. Otherwise the calculation will be wrong";
return nullptr;
}
auto prim = std::make_shared<Primitive>(kConfusionSoftmaxGradOpName); auto prim = std::make_shared<Primitive>(kConfusionSoftmaxGradOpName);
MS_EXCEPTION_IF_NULL(prim); MS_EXCEPTION_IF_NULL(prim);
std::vector<AnfNodePtr> inputs = {NewValueNode(prim), input0, input1}; std::vector<AnfNodePtr> inputs = {NewValueNode(prim), input0, input1};
auto confusion_softmax_grad = graph->NewCNode(inputs); auto fusion_node = graph->NewCNode(inputs);
MS_EXCEPTION_IF_NULL(confusion_softmax_grad); MS_EXCEPTION_IF_NULL(fusion_node);
auto types = {AnfAlgo::GetOutputInferDataType(node, 0)}; fusion_node->set_abstract(node->abstract());
auto shapes = {AnfAlgo::GetOutputInferShape(node, 0)}; fusion_node->set_scope(node->scope());
AnfAlgo::SetOutputInferTypeAndShape(types, shapes, confusion_softmax_grad.get()); AnfAlgo::CopyNodeAttr(kAttrAxis, sum_anf, fusion_node);
confusion_softmax_grad->set_scope(node->scope()); AnfAlgo::CopyNodeAttr(kAttrKeepDims, sum_anf, fusion_node);
SetAttrsForFusionNode(node, confusion_softmax_grad); return fusion_node;
return confusion_softmax_grad;
} }
} // namespace opt } // namespace opt
} // namespace mindspore } // namespace mindspore

View File

@ -24,9 +24,11 @@ namespace opt {
class ConfusionSoftmaxGradRule : public PatternProcessPass { class ConfusionSoftmaxGradRule : public PatternProcessPass {
public: public:
explicit ConfusionSoftmaxGradRule(bool multigraph = true) explicit ConfusionSoftmaxGradRule(bool multigraph = true)
: PatternProcessPass("confusion_softmax_grad_rule", multigraph), : PatternProcessPass("confusion_softmax_grad_rule", multigraph) {
input0_(std::make_shared<Var>()), input0_ = std::make_shared<Var>();
input1_(std::make_shared<Var>()) {} input1_ = std::make_shared<Var>();
reduce_sum_ = std::make_shared<Var>(std::make_shared<Primitive>(prim::kPrimReduceSum->name()));
}
~ConfusionSoftmaxGradRule() override = default; ~ConfusionSoftmaxGradRule() override = default;
const BaseRef DefinePattern() const override; const BaseRef DefinePattern() const override;
const AnfNodePtr Process(const FuncGraphPtr &, const AnfNodePtr &, const EquivPtr &) const override; const AnfNodePtr Process(const FuncGraphPtr &, const AnfNodePtr &, const EquivPtr &) const override;
@ -34,6 +36,7 @@ class ConfusionSoftmaxGradRule : public PatternProcessPass {
private: private:
VarPtr input0_; VarPtr input0_;
VarPtr input1_; VarPtr input1_;
VarPtr reduce_sum_;
}; };
} // namespace opt } // namespace opt
} // namespace mindspore } // namespace mindspore

View File

@ -16,7 +16,7 @@ from mindspore.ops import Primitive
from mindspore.ops import operations as P from mindspore.ops import operations as P
mul = P.Mul() mul = P.Mul()
reduce_sum = P.ReduceSum() reduce_sum = P.ReduceSum(keep_dims=True)
sub = P.Sub() sub = P.Sub()
confusion_softmax_grad = Primitive('ConfusionSoftmaxGrad') confusion_softmax_grad = Primitive('ConfusionSoftmaxGrad')
make_tuple = Primitive('make_tuple') make_tuple = Primitive('make_tuple')