From 04bc2a938eef08dd1231a2a82f6d4e4e8dd258ea Mon Sep 17 00:00:00 2001
From: chenhaozhe
Date: Thu, 28 May 2020 10:30:56 +0800
Subject: [PATCH] fix performance of bert

---
 mindspore/ccsrc/pre_activate/common/helper.cc | 2 +-
 mindspore/nn/optim/lamb.py                    | 2 +-
 mindspore/ops/_grad/grad_math_ops.py          | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/mindspore/ccsrc/pre_activate/common/helper.cc b/mindspore/ccsrc/pre_activate/common/helper.cc
index 649e2746b5c..f9bc9a3fb35 100644
--- a/mindspore/ccsrc/pre_activate/common/helper.cc
+++ b/mindspore/ccsrc/pre_activate/common/helper.cc
@@ -686,7 +686,7 @@ bool IsSameNode(const EquivPtr &equiv1, const EquivPtr &equiv2, const VarPtr &va
   MS_EXCEPTION_IF_NULL(equiv1_node);
   auto equiv2_node = GetAnfNodeByVar(equiv2, var_node);
   MS_EXCEPTION_IF_NULL(equiv2_node);
-  return equiv1_node == equiv2_node;
+  return *equiv1_node == *equiv2_node;
 }

 AnfNodePtr GetAnfNodeByVar(const EquivPtr &equiv, const VarPtr &var_node) {
diff --git a/mindspore/nn/optim/lamb.py b/mindspore/nn/optim/lamb.py
index b4d478f52ab..a6a38f164a7 100755
--- a/mindspore/nn/optim/lamb.py
+++ b/mindspore/nn/optim/lamb.py
@@ -180,7 +180,7 @@ class Lamb(Optimizer):
                  beta2=0.999,
                  eps=1e-6,
                  weight_decay=0.0,
-                 decay_filter=lambda x: 'LayerNorm' not in x.name and 'bias' not in x.name):
+                 decay_filter=lambda x: 'layernorm' not in x.name.lower() and 'bias' not in x.name.lower()):
         super(Lamb, self).__init__(start_learning_rate, params)

         if self.is_group:
diff --git a/mindspore/ops/_grad/grad_math_ops.py b/mindspore/ops/_grad/grad_math_ops.py
index f457148d513..8edf6d82f24 100755
--- a/mindspore/ops/_grad/grad_math_ops.py
+++ b/mindspore/ops/_grad/grad_math_ops.py
@@ -191,8 +191,8 @@ def get_bprop_mul(self):
     mul_func = P.Mul()

     def bprop(x, y, out, dout):
-        bc_dx = mul_func(dout, y)
-        bc_dy = mul_func(dout, x)
+        bc_dx = mul_func(y, dout)
+        bc_dy = mul_func(x, dout)
         return binop_grad_common(x, y, bc_dx, bc_dy)

     return bprop
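
Notes on the changes (explanatory sketches, not part of the patch):

helper.cc: equiv1_node and equiv2_node are smart pointers, so the old
"equiv1_node == equiv2_node" compared addresses: two structurally identical
nodes reached through different equivalence maps never tested equal, which
likely kept pattern-matching passes from recognizing shared subgraphs.
Dereferencing makes IsSameNode use the node type's own equality operator
instead. A minimal Python analogue of the identity-vs-value distinction
(hypothetical Node class, not MindSpore code):

    class Node:
        """Toy node with structural equality, standing in for an ANF node."""
        def __init__(self, op, inputs):
            self.op, self.inputs = op, inputs

        def __eq__(self, other):
            # Structural equality: same operator and same inputs.
            return self.op == other.op and self.inputs == other.inputs

    n1 = Node("Mul", ("x", "y"))
    n2 = Node("Mul", ("x", "y"))
    print(n1 is n2)  # False: distinct objects, what the old pointer compare saw
    print(n1 == n2)  # True: structurally equal, what *a == *b now checks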
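
lamb.py: the default decay_filter matched parameter names case-sensitively,
so names spelled with different casing (for example 'layernorm' or 'Bias')
were not exempted and received weight decay that LayerNorm and bias
parameters are meant to skip. The fix lowercases the name before matching.
A small before/after illustration (the parameter name below is hypothetical):

    class Param:
        def __init__(self, name):
            self.name = name

    old_filter = lambda x: 'LayerNorm' not in x.name and 'bias' not in x.name
    new_filter = lambda x: ('layernorm' not in x.name.lower()
                            and 'bias' not in x.name.lower())

    p = Param('bert.encoder.layer.0.layernorm.gamma')
    print(old_filter(p))  # True: old filter still applies weight decay here
    print(new_filter(p))  # False: new filter exempts it as intended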
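
grad_math_ops.py: for out = x * y the gradients are dx = dout * y and
dy = dout * x, and elementwise multiplication commutes, so swapping the
operand order changes only the structure of the emitted graph, not the
computed values. The motivation is presumably that the new operand order
matches a backend fusion or broadcast pattern better; the commit message
only says it fixes BERT performance, so that reading is an assumption.
A quick NumPy check that the swap is value-preserving:

    import numpy as np

    x = np.random.rand(2, 3)
    y = np.random.rand(2, 3)
    dout = np.random.rand(2, 3)

    # bc_dx: mul(dout, y) before, mul(y, dout) after; same values.
    assert np.allclose(dout * y, y * dout)
    # bc_dy: mul(dout, x) before, mul(x, dout) after; same values.
    assert np.allclose(dout * x, x * dout)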