forked from mindspore-Ecosystem/mindspore
Optimize the exception logs of AD
parent 7c88502ef5
commit 752bdde06d
@@ -120,3 +120,4 @@
 "mindspore/tests/st/explainer/explanation/_attribution/_backprop/test_gradcam.py" "not-callable"
 "mindspore/tests/st/explainer/explanation/_attribution/_backprop/test_gradient.py" "not-callable"
 "mindspore/tests/st/explainer/explanation/_attribution/_backprop/test_modified_relu.py" "not-callable"
+"mindspore/tests/ut/python/optimizer/test_auto_grad.py" "broad-except"
@@ -701,14 +701,15 @@ void GradOperation::GradByParameter(const FuncGraphPtr &k_child, const AnfNodePt
 // Generate the graph.
 FuncGraphPtr GradOperation::GenerateFuncGraph(const AbstractBasePtrList &args_spec_list) {
   if (args_spec_list.empty()) {
-    MS_LOG(EXCEPTION) << "GenerateGraph requires at least 1 parameters, while the input size is "
-                      << args_spec_list.size() << ".";
+    MS_LOG(EXCEPTION)
+      << "'GradOperation' requires a forward network or function as an input, while the input is empty.";
   }

   MS_EXCEPTION_IF_NULL(args_spec_list[0]);
   AbstractFunctionPtr fn = dyn_cast<AbstractFunction>(args_spec_list[0]);
   if (fn == nullptr) {
-    MS_LOG(EXCEPTION) << "GradOperation arg0 must be AbstractFunction, but " << args_spec_list[0]->ToString();
+    MS_LOG(EXCEPTION) << "'GradOperation' arg0 must be a 'Function' or 'Cell', but got "
+                      << args_spec_list[0]->ToString();
   }

   // Waiting for implementation.
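For context, a minimal sketch (not part of this patch; the names Net and grad_fn are illustrative) of how 'GradOperation' is normally applied: the instance is first called with a forward Cell or function, and the returned gradient function is then called with the inputs. Passing nothing, or something that is not a function or Cell, now produces the reworded messages above.

import numpy as np
from mindspore import nn, ops, Tensor

class Net(nn.Cell):
    def construct(self, x, y):
        return x * y

grad_op = ops.GradOperation()
grad_fn = grad_op(Net())    # arg0: a forward network (Cell) or function
x = Tensor(np.array([2.0], dtype=np.float32))
y = Tensor(np.array([3.0], dtype=np.float32))
print(grad_fn(x, y))        # gradient of the output w.r.t. the first input x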
@@ -464,8 +464,9 @@ FuncGraphPtr DFunctor::KUserDefined(const FuncGraphPtr &primal) {
   resources_->manager()->AddFuncGraph(bprop_graph);

   if (!bprop_graph->free_variables_nodes().empty() || !primal->free_variables_nodes().empty()) {
-    MS_LOG(EXCEPTION) << "User defined Cell bprop " << primal->ToString() << " in scope "
-                      << primal->output()->scope()->name() << " does not support Parameter data type.";
+    MS_LOG(EXCEPTION) << "The Cell with user defined 'bprop' function in scope " << primal->output()->scope()->name()
+                      << " does not support Parameter data type.\n"
+                      << trace::GetDebugInfo(bprop_graph->debug_info());
   }
   bprop_graph->set_flag(mindspore::kFuncGraphFlagBackPropEntry, true);
   bprop_graph->set_flag(mindspore::kFuncGraphFlagReAutoMonad, true);
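A rough illustration (not from the patch; ScaleNet is a made-up name) of the situation this check rejects: a Cell that both holds a Parameter and defines its own 'bprop'. Keeping trainable Parameters outside the bprop'd Cell avoids the error.

import numpy as np
from mindspore import nn, ops, Tensor, Parameter

class ScaleNet(nn.Cell):
    def __init__(self):
        super(ScaleNet, self).__init__()
        self.w = Parameter(Tensor(np.array([2.0], dtype=np.float32)), name="w")

    def construct(self, x):
        return self.w * x

    def bprop(self, x, out, dout):
        # user defined backward: only the gradient w.r.t. x is returned
        return (dout * self.w,)

grad_all = ops.GradOperation(get_all=True)
x = Tensor(np.array([3.0], dtype=np.float32))
try:
    grad_all(ScaleNet())(x)
except Exception as e:
    print(e)  # expected to mention the Parameter restriction of user defined 'bprop'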
@@ -189,8 +189,6 @@ FuncGraphPtr KPrim::BpropToK(const T &primal, const FuncGraphPtr &bprop_fg, cons
   MS_EXCEPTION_IF_NULL(bprop_fg);
   CheckBprop(bprop_fg, primal->ToString());

-  auto debug_info = std::make_shared<GraphDebugInfo>();
-  debug_info->set_name(primal->ToString());
   FuncGraphPtr cloned_bprop_fg;
   {
     PrimalAttrGuard primal_attr_guard(primal_attrs);
@@ -199,21 +197,30 @@ FuncGraphPtr KPrim::BpropToK(const T &primal, const FuncGraphPtr &bprop_fg, cons
   }
   MS_EXCEPTION_IF_NULL(cloned_bprop_fg);

+  GraphDebugInfoPtr debug_info = nullptr;
+  {
+    TraceGuard guard(std::make_shared<TraceGradFprop>(bprop_fg->debug_info()));
+    debug_info = std::make_shared<GraphDebugInfo>();
+    debug_info->set_name(primal->ToString());
+  }
   cloned_bprop_fg->debug_info()->set_name("");
   cloned_bprop_fg->debug_info()->set_trace_info(std::make_shared<TraceGradBprop>(debug_info));

   // Make sure (out, dout) provided.
   if (cloned_bprop_fg->parameters().size() < 2) {
-    MS_LOG(EXCEPTION) << "Primitive or Cell " << primal->ToString()
-                      << " bprop requires out and dout at least, but only got " << cloned_bprop_fg->parameters().size()
-                      << " params. NodeInfo: " << trace::GetDebugInfo(cloned_bprop_fg->debug_info());
+    MS_LOG(EXCEPTION)
+      << "The function 'bprop' of Primitive or Cell requires at least 2 params 'out' and 'dout', but got only "
+      << cloned_bprop_fg->parameters().size() << ".\n"
+      << trace::GetDebugInfo(cloned_bprop_fg->debug_info());
   }
   AnfNodePtr bout = BuildOutput(cloned_bprop_fg, current_primal_fg);
   cloned_bprop_fg->set_output(bout);

   FuncGraphPtr outer = nullptr;
   {
-    TraceGuard guard(std::make_shared<TraceGradFprop>(debug_info));
+    auto outer_debug_info = std::make_shared<GraphDebugInfo>();
+    outer_debug_info->set_name(primal->ToString());
+    TraceGuard guard(std::make_shared<TraceGradFprop>(outer_debug_info));
     outer = std::make_shared<FuncGraph>();
     (void)outer->transforms().emplace("primal", FuncGraphTransform(primal));
     outer->set_output(NewValueNode(kNone));
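A small sketch (illustrative, not part of the patch; MulNet is a made-up name) of the signature this check enforces: a user-defined 'bprop' receives the forward inputs followed by 'out' and 'dout', and returns one gradient per forward input. Omitting 'dout' now triggers the clearer "at least 2 params" message.

import numpy as np
from mindspore import nn, ops, Tensor

class MulNet(nn.Cell):
    def construct(self, x, y):
        return x * y

    # forward inputs (x, y), then the forward output 'out' and the incoming gradient 'dout'
    def bprop(self, x, y, out, dout):
        return dout * y, dout * x

grad_all = ops.GradOperation(get_all=True)
x = Tensor(np.array([2.0], dtype=np.float32))
y = Tensor(np.array([3.0], dtype=np.float32))
print(grad_all(MulNet())(x, y))  # gradients w.r.t. x and y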
@@ -407,7 +407,9 @@ FuncGraphPtr KPrim::KPrimitive(const CNodePtr &cnode, const ValueNodePtr &value_
   FuncGraphPtr bprop_fg = nullptr;
   if (prim->Hash() == prim::kPrimHookBackward->Hash() && prim->name() == prim::kPrimHookBackward->name()) {
     if (MsContext::GetInstance()->get_param<int>(MsCtxParam::MS_CTX_EXECUTION_MODE) == kGraphMode) {
-      MS_LOG(EXCEPTION) << "HookBackward is not supported in graph mode.";
+      MS_LOG(EXCEPTION)
+        << "The Primitive 'HookBackward' is not supported in graph mode, which is only supported in pynative mode.\n"
+        << trace::GetDebugInfo(cnode->debug_info());
     }
     bprop_fg = BpropCut(value_node, resources);
   } else {
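For reference, a hedged sketch (not part of the patch; Net and print_hook are illustrative) of the supported use: 'HookBackward' works under PyNative mode, where the hook fires while the gradient is computed; under graph mode the new message above is raised instead.

import numpy as np
import mindspore.context as context
from mindspore import nn, ops, Tensor
from mindspore.ops import operations as P

context.set_context(mode=context.PYNATIVE_MODE)

def print_hook(grad):
    print("incoming gradient:", grad)

class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.hook = P.HookBackward(print_hook)

    def construct(self, x, y):
        x = self.hook(x)
        return x + y

grad_all = ops.GradOperation(get_all=True)
x = Tensor(np.array([2.0], dtype=np.float32))
y = Tensor(np.array([3.0], dtype=np.float32))
grad_all(Net())(x, y)  # the hook prints the gradient flowing back to x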
@@ -478,9 +480,10 @@ AnfNodePtr KPrim::BuildOutput(const FuncGraphPtr &bprop_fg, const FuncGraphPtr &
     } else if (HasAbstractIOMonad(primal_node)) {
       extra_node = NewValueNode(kIOMonad);
     } else {
-      MS_EXCEPTION(TypeError) << "Function: " << current_primal_fg->ToString()
-                              << ", has extra parameter which is not UMoand or IOMonad, but: "
-                              << primal_node->DebugString();
+      MS_EXCEPTION(TypeError)
+        << "The params of function 'bprop' of Primitive or Cell requires the forward inputs as well "
+           "as the 'out' and 'dout'.\n"
+        << trace::GetDebugInfo(bprop_fg->debug_info());
     }
     extra_args.push_back(extra_node);
     MS_LOG(DEBUG) << "Insert to bprop_fg for node: " << primal_node->DebugString();
@@ -319,11 +319,14 @@ class GradOperation(GradOperation_):
     def __init__(self, get_all=False, get_by_list=False, sens_param=False):
         """Initialize GradOperation."""
         if not isinstance(get_all, bool):
-            raise TypeError(f"For 'GradOperation', the 'get_all' should be bool, but got {type(get_all)}")
+            raise TypeError(f"For 'GradOperation', the arg 'get_all' should be bool, but got {get_all} with type "
+                            f"{type(get_all)}")
         if not isinstance(get_by_list, bool):
-            raise TypeError(f"For 'GradOperation', the 'get_by_list' should be bool, but got {type(get_by_list)}")
+            raise TypeError(f"For 'GradOperation', the arg 'get_by_list' should be bool, but got {get_by_list} with "
+                            f"type {type(get_by_list)}")
         if not isinstance(sens_param, bool):
-            raise TypeError(f"For 'GradOperation', the 'sens_param' should be bool, but got {type(sens_param)}")
+            raise TypeError(f"For 'GradOperation', the arg 'sens_param' should be bool, but got {sens_param} with type "
+                            f"{type(sens_param)}")
         self.get_all = get_all
         self.get_by_list = get_by_list
         self.sens_param = sens_param
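A tiny sketch (illustrative, not part of the patch) of the effect of this change: the TypeError now reports the offending value together with its type, not just the type.

from mindspore import ops

try:
    ops.GradOperation(get_all=2)
except TypeError as e:
    print(e)  # e.g. "For 'GradOperation', the arg 'get_all' should be bool, but got 2 with type <class 'int'>"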
@@ -361,3 +361,258 @@ def test_same_primal_used_by_multi_j_with_monad2():
     net = AdamNet(var, m, v)
     grad_net = AdamGradNet(net)
     grad_net(beta1_power, beta2_power, lr, beta1, beta2, epsilon, grad)
+
+
+def test_grad_args_type_error1():
+    class Net(nn.Cell):
+        def __init__(self):
+            super(Net, self).__init__()
+            self.matmul = P.MatMul()
+        def construct(self, x, y):
+            out = self.matmul(x, y)
+            return out
+
+    class GradNetWrtX(nn.Cell):
+        def __init__(self, net):
+            super(GradNetWrtX, self).__init__()
+            self.net = net
+            self.grad_op = ops.GradOperation(get_all=2)
+        def construct(self, x, y):
+            gradient_function = self.grad_op(self.net)
+            return gradient_function(x, y)
+
+    x = Tensor(np.array([2.0], dtype=np.float32))
+    y = Tensor(np.array([2.0], dtype=np.float32))
+    try:
+        GradNetWrtX(Net())(x, y)
+    except TypeError as e:
+        assert "For 'GradOperation', the arg 'get_all' should be bool, but got" in str(e)
+
+
+def test_grad_args_type_error2():
+    class Net(nn.Cell):
+        def __init__(self):
+            super(Net, self).__init__()
+            self.matmul = P.MatMul()
+        def construct(self, x, y):
+            out = self.matmul(x, y)
+            return out
+
+    class GradNetWrtX(nn.Cell):
+        def __init__(self, net):
+            super(GradNetWrtX, self).__init__()
+            self.net = net
+            self.grad_op = ops.GradOperation(get_by_list=2)
+        def construct(self, x, y):
+            gradient_function = self.grad_op(self.net)
+            return gradient_function(x, y)
+
+    x = Tensor(np.array([2.0], dtype=np.float32))
+    y = Tensor(np.array([2.0], dtype=np.float32))
+    try:
+        GradNetWrtX(Net())(x, y)
+    except TypeError as e:
+        assert "For 'GradOperation', the arg 'get_by_list' should be bool, but got" in str(e)
+
+
+def test_grad_args_type_error3():
+    class Net(nn.Cell):
+        def __init__(self):
+            super(Net, self).__init__()
+            self.matmul = P.MatMul()
+        def construct(self, x, y):
+            out = self.matmul(x, y)
+            return out
+
+    class GradNetWrtX(nn.Cell):
+        def __init__(self, net):
+            super(GradNetWrtX, self).__init__()
+            self.net = net
+            self.grad_op = ops.GradOperation(sens_param=2)
+        def construct(self, x, y):
+            gradient_function = self.grad_op(self.net)
+            return gradient_function(x, y)
+
+    x = Tensor(np.array([2.0], dtype=np.float32))
+    y = Tensor(np.array([2.0], dtype=np.float32))
+    try:
+        GradNetWrtX(Net())(x, y)
+    except TypeError as e:
+        assert "For 'GradOperation', the arg 'sens_param' should be bool, but got" in str(e)
+
+
+def test_grad_net_is_none():
+    class Net(nn.Cell):
+        def __init__(self):
+            super(Net, self).__init__()
+            self.add = P.Add()
+        def construct(self, x, y):
+            out = self.add(x, y)
+            return out
+
+    class GradNetWrtX(nn.Cell):
+        def __init__(self, net):
+            super(GradNetWrtX, self).__init__()
+            self.net = P.Add()
+            self.grad_op = ops.GradOperation()
+        def construct(self, x, y):
+            gradient_function = self.grad_op(None)
+            return gradient_function(x, y)
+
+    x = Tensor(np.array([2.0], dtype=np.float32))
+    y = Tensor(np.array([2.0], dtype=np.float32))
+    try:
+        GradNetWrtX(Net())(x, y)
+    except Exception as e:
+        assert "'GradOperation' arg0 must be a 'Function' or 'Cell', but got" in str(e)
+
+
+def test_grad_missing_net():
+    class Net(nn.Cell):
+        def __init__(self):
+            super(Net, self).__init__()
+            self.add = P.Add()
+        def construct(self, x, y):
+            out = self.add(x, y)
+            return out
+
+    class GradNetWrtX(nn.Cell):
+        def __init__(self, net):
+            super(GradNetWrtX, self).__init__()
+            self.net = net
+            self.grad_op = ops.GradOperation()
+        def construct(self, x, y):
+            gradient_function = self.grad_op()
+            return gradient_function(x, y)
+
+    x = Tensor(np.array([2.0], dtype=np.float32))
+    y = Tensor(np.array([2.0], dtype=np.float32))
+    try:
+        GradNetWrtX(Net())(x, y)
+    except Exception as e:
+        assert "'GradOperation' requires a forward network or function as an input, while the input is empty." in str(e)
+
+
+def test_user_defined_bprop_inputs_size_error():
+    class BpropUserDefinedNet(nn.Cell):
+        def __init__(self):
+            super(BpropUserDefinedNet, self).__init__()
+            self.zeros_like = P.ZerosLike()
+
+        def construct(self, x, y):
+            return x + y
+
+        def bprop(self, out):
+            return self.zeros_like(out), self.zeros_like(out)
+
+    class BpropUserDefinedGradNet(nn.Cell):
+        def __init__(self, net):
+            super(BpropUserDefinedGradNet, self).__init__()
+            self.net = net
+
+        def construct(self, x, y):
+            return grad_all(self.net)(x, y)
+
+    net = BpropUserDefinedNet()
+    grad_net = BpropUserDefinedGradNet(net)
+    x = Tensor(np.array([2.0], dtype=np.float32))
+    y = Tensor(np.array([2.0], dtype=np.float32))
+    try:
+        grad_net(x, y)
+    except Exception as e:
+        assert "The function 'bprop' of Primitive or Cell requires at least 2 params 'out' and 'dout', but got only"\
+               in str(e)
+
+
+def test_user_defined_bprop_net_has_parameter():
+    class BpropUserDefinedNet(nn.Cell):
+        def __init__(self):
+            super(BpropUserDefinedNet, self).__init__()
+            self.zeros_like = P.ZerosLike()
+            self.x = Parameter(Tensor(np.array([2.0], dtype=np.float32)), name="x")
+
+        def construct(self, y):
+            return self.x + y
+
+        def bprop(self, y, out, dout):
+            return (self.zeros_like(out),)
+
+    class BpropUserDefinedGradNet(nn.Cell):
+        def __init__(self, net):
+            super(BpropUserDefinedGradNet, self).__init__()
+            self.net = net
+
+        def construct(self, y):
+            return grad_all(self.net)(y)
+
+    net = BpropUserDefinedNet()
+    grad_net = BpropUserDefinedGradNet(net)
+    y = Tensor(np.array([2.0], dtype=np.float32))
+    try:
+        grad_net(y)
+    except Exception as e:
+        assert "The Cell with user defined 'bprop' function in scope" in str(e)
+        assert "does not support Parameter data type." in str(e)
+
+
+def test_user_defined_bprop_inputs_size_error1():
+    class BpropUserDefinedNet(nn.Cell):
+        def __init__(self):
+            super(BpropUserDefinedNet, self).__init__()
+            self.zeros_like = P.ZerosLike()
+
+        def construct(self, x, y):
+            return x + y
+
+        def bprop(self, x, y, out):
+            return self.zeros_like(out), self.zeros_like(out)
+
+    class BpropUserDefinedGradNet(nn.Cell):
+        def __init__(self, net):
+            super(BpropUserDefinedGradNet, self).__init__()
+            self.net = net
+
+        def construct(self, x, y):
+            return grad_all(self.net)(x, y)
+
+    net = BpropUserDefinedNet()
+    grad_net = BpropUserDefinedGradNet(net)
+    x = Tensor(np.array([2.0], dtype=np.float32))
+    y = Tensor(np.array([2.0], dtype=np.float32))
+    try:
+        grad_net(x, y)
+    except TypeError as e:
+        assert "The params of function 'bprop' of Primitive or Cell requires the forward inputs as well as the 'out' " \
+               "and 'dout'." in str(e)
+
+
+def test_grad_hook():
+    def var_hook_function(grad_out):
+        assert grad_out[0].asnumpy().shape == (32, 120)
+
+    class Net(nn.Cell):
+        def __init__(self):
+            super(Net, self).__init__()
+            self.add = P.Add()
+            self.hook = P.HookBackward(var_hook_function)
+        def construct(self, x, y):
+            x = self.hook(x)
+            out = self.add(x, y)
+            return out
+
+    class GradNetWrtX(nn.Cell):
+        def __init__(self, net):
+            super(GradNetWrtX, self).__init__()
+            self.net = net
+            self.grad_op = ops.GradOperation()
+        def construct(self, x, y):
+            gradient_function = self.grad_op(self.net)
+            return gradient_function(x, y)
+
+    x = Tensor(np.array([2.0], dtype=np.float32))
+    y = Tensor(np.array([2.0], dtype=np.float32))
+    try:
+        GradNetWrtX(Net())(x, y)
+    except Exception as e:
+        assert "The Primitive 'HookBackward' is not supported in graph mode, which is only supported in pynative " \
+               "mode." in str(e)