diff --git a/mindspore/ccsrc/pipeline/jit/parse/data_converter.cc b/mindspore/ccsrc/pipeline/jit/parse/data_converter.cc
index 7607fdea19e..ee9abb319ac 100644
--- a/mindspore/ccsrc/pipeline/jit/parse/data_converter.cc
+++ b/mindspore/ccsrc/pipeline/jit/parse/data_converter.cc
@@ -52,6 +52,7 @@ FuncGraphPtr ConvertToBpropCut(const py::object &obj) {
   outputs.push_back(NewValueNode(fake_bprop));
 
   py::object code_obj = py::getattr(bprop_func, "__code__");
+  // Three parameters self, out and dout need to be excluded
   size_t inputs_num = py::cast<int64_t>(py::getattr(code_obj, "co_argcount")) - 3;
   for (size_t i = 0; i < inputs_num; ++i) {
     auto param = bprop_graph->add_parameter();
diff --git a/mindspore/ccsrc/pipeline/pynative/pynative_execute.cc b/mindspore/ccsrc/pipeline/pynative/pynative_execute.cc
index ea461d66f7c..c65224df1f2 100644
--- a/mindspore/ccsrc/pipeline/pynative/pynative_execute.cc
+++ b/mindspore/ccsrc/pipeline/pynative/pynative_execute.cc
@@ -2488,7 +2488,7 @@ void PynativeExecutor::MakeNestedCnode(const std::string &cell_id, const py::arg
     inputs.emplace_back(GetInput(args[i], false));
   }
   if (newfg->parameters().size() > inputs_size) {
-    SetNestedWeigthsParam(newfg, cell_id, &inputs);
+    SetNestedWeightsParam(newfg, cell_id, &inputs);
  }
   auto out_id = GetId(out);
   auto cnode = graph_prev->NewCNode(inputs);
@@ -2497,7 +2497,7 @@
   MS_LOG(DEBUG) << "Nested make cnode is " << cnode->DebugString(4);
 }
 
-void PynativeExecutor::SetNestedWeigthsParam(const FuncGraphPtr &newfg, const std::string &cell_id,
+void PynativeExecutor::SetNestedWeightsParam(const FuncGraphPtr &newfg, const std::string &cell_id,
                                              std::vector<AnfNodePtr> *inputs) {
   FuncGraphPtr forward_graph = nullptr;
   auto ic = std::find_if(cell_graph_list_.begin(), cell_graph_list_.end(),
diff --git a/mindspore/ccsrc/pipeline/pynative/pynative_execute.h b/mindspore/ccsrc/pipeline/pynative/pynative_execute.h
index 16dc576c153..2252b326b69 100644
--- a/mindspore/ccsrc/pipeline/pynative/pynative_execute.h
+++ b/mindspore/ccsrc/pipeline/pynative/pynative_execute.h
@@ -240,7 +240,7 @@ class PynativeExecutor : public std::enable_shared_from_this<PynativeExecutor> {
   void SetNestedTopGraph(const py::object &cell, const py::args &args, const std::string &cell_id);
   void MakeNestedCnode(const std::string &cell_id, const py::args &args, const ResourcePtr &resource,
                        const py::object &out, bool has_sens);
-  void SetNestedWeigthsParam(const FuncGraphPtr &newfg, const std::string &cell_id, std::vector<AnfNodePtr> *inputs);
+  void SetNestedWeightsParam(const FuncGraphPtr &newfg, const std::string &cell_id, std::vector<AnfNodePtr> *inputs);
   bool MakeBpropNestedCnode(const py::object &cell, const py::object &out, const std::string &cell_id);
 
   // Hold graph(forward and grad) info
diff --git a/mindspore/nn/cell.py b/mindspore/nn/cell.py
index 6c85fcdf6bb..5e6b69deec6 100755
--- a/mindspore/nn/cell.py
+++ b/mindspore/nn/cell.py
@@ -49,7 +49,7 @@ class Cell(Cell_):
     The bprop implementation will receive a Tensor `dout` containing the gradient of the loss w.r.t.
     the output, and a Tensor `out` containing the forward result. The bprop needs to compute the
     gradient of the loss w.r.t. the inputs, gradient of the loss w.r.t. Parameter variables are not supported
-    currently.
+    currently. The bprop method must contain the self parameter.
 
     Args:
         auto_prefix (bool): Recursively generate namespaces. Default: True.