!46141 Fix a bug in parameter export

Merge pull request !46141 from lianliguang/fix-bug-of-parameter-export
This commit is contained in:
i-robot 2023-02-01 07:41:59 +00:00 committed by Gitee
commit 1c0867d48b
No known key found for this signature in database
GPG Key ID: 173E9B9CA92EEF8F
10 changed files with 145 additions and 75 deletions

View File

@ -185,7 +185,7 @@ AbstractBasePtr InferImplLower(const AnalysisEnginePtr &, const PrimitivePtr &pr
auto input = args_spec_list[0]->BuildValue();
if (input == nullptr || !input->isa<StringImm>()) {
auto type = args_spec_list[0]->BuildType();
MS_LOG(ERROR) << "Function lower should be call using a string type but got:" << type->ToString();
MS_EXCEPTION(TypeError) << "Function lower should be call using a string type but got:" << type->ToString();
}
auto str = input->cast<StringImmPtr>()->value();
std::transform(str.begin(), str.end(), str.begin(), ::tolower);

View File

@ -48,7 +48,7 @@ class ClassTypeResolve : public AnfVisitor {
}
private:
ValuePtr ConvertValueSequence(const ValuePtr &value, bool *need_convert);
ValuePtr ConvertValueSequence(const ValueSequencePtr &value, bool *need_convert);
ValuePtr ConvertValue(const ValuePtr &value, bool *need_convert);
};
ValuePtr ClassTypeResolve::ConvertValue(const ValuePtr &value, bool *need_convert) {
@ -62,6 +62,7 @@ ValuePtr ClassTypeResolve::ConvertValue(const ValuePtr &value, bool *need_conver
(*need_convert) = true;
return std::make_shared<MindIRNameSpace>(name_space);
}
if (value->isa<ValueDictionary>()) {
auto dic = value->cast<ValueDictionaryPtr>();
auto dic_pairs = dic->value();
@ -73,26 +74,24 @@ ValuePtr ClassTypeResolve::ConvertValue(const ValuePtr &value, bool *need_conver
return std::make_shared<ValueDictionary>(convert_dict);
}
}
if (value->isa<ValueSequence>()) {
return ConvertValueSequence(value, need_convert);
MS_EXCEPTION_IF_NULL(value);
auto seq_value = value->cast<ValueSequencePtr>();
return ConvertValueSequence(seq_value, need_convert);
}
return value;
}
ValuePtr ClassTypeResolve::ConvertValueSequence(const ValuePtr &value, bool *need_convert) {
MS_EXCEPTION_IF_NULL(value);
auto seq_value = value->cast<ValueSequencePtr>();
if (seq_value == nullptr) {
return nullptr;
}
// Convert every element of a value sequence (tuple/list) through ConvertValue.
// *need_convert is set by ConvertValue when any element actually required
// conversion; if nothing was converted the original sequence is returned
// unchanged to avoid an unnecessary rebuild.
ValuePtr ClassTypeResolve::ConvertValueSequence(const ValueSequencePtr &seq_value, bool *need_convert) {
  MS_EXCEPTION_IF_NULL(seq_value);
  MS_EXCEPTION_IF_NULL(need_convert);
  auto vec_seq = std::vector<ValuePtr>();
  vec_seq.reserve(seq_value->size());
  for (size_t i = 0; i < seq_value->size(); ++i) {
    (void)vec_seq.emplace_back(ConvertValue((*seq_value)[i], need_convert));
  }
  // Bug fix: the original wrote `if (!need_convert)`, testing the POINTER
  // instead of the pointee, so this early-return path was dead for any
  // non-null pointer and a new sequence was always allocated.
  if (!*need_convert) {
    return seq_value;
  }
  // Preserve the concrete sequence kind (tuple vs. list).
  if (seq_value->isa<ValueTuple>()) {
    return std::make_shared<ValueTuple>(vec_seq);
  }
  return std::make_shared<ValueList>(vec_seq);
}

View File

@ -41,15 +41,6 @@ inline CNode *GetCallNode(const AnfNodePtr &node) {
return func_c_node;
}
inline bool IsGetAttrPrimNode(const AnfNodePtr &node) {
MS_EXCEPTION_IF_NULL(node);
auto primitive = GetValuePtr<Primitive>(node);
if (primitive == nullptr) {
return false;
}
return primitive->name() == "getattr";
}
inline Primitive *GetPrimNode(const AnfNodePtr &node) {
MS_EXCEPTION_IF_NULL(node);
auto primitive = GetValuePtr<Primitive>(node);
@ -94,9 +85,8 @@ bool ReslovePrimitiveAttr::IsCNodeMinIRMetaGraphGetItem(const AnfNodePtr &node)
if (meta_func->name() != "getitem") {
return false;
}
auto dict_node = cnode->input(1);
auto string_node = cnode->input(2);
auto dict_node = cnode->input(kIndex1);
auto string_node = cnode->input(kIndex2);
return IsStringAttrValueNode(string_node) && IsCallPrimitiveAttrDictNode(dict_node);
}
@ -109,7 +99,8 @@ bool ReslovePrimitiveAttr::IsGetAttrDictFuncNode(const CNode *node) {
auto prim_node = node->input(1);
auto attr_name_node = node->input(2);
primitive_ = GetPrimNode(prim_node);
return IsGetAttrPrimNode(attr_prim_node) && primitive_ != nullptr && IsGetAttrDictStringImmValueNode(attr_name_node);
return IsPrimitive(attr_prim_node, prim::kPrimGetAttr) && primitive_ != nullptr &&
IsGetAttrDictStringImmValueNode(attr_name_node);
}
} // namespace irpass
} // namespace opt

View File

@ -338,7 +338,7 @@ FuncGraphPtr Renormalize(const ResourcePtr &resource, const FuncGraphPtr &func_g
return res;
}
void SetLoadFlag(const ResourcePtr &resource) {
void SetMindIRLoadFlag(const ResourcePtr &resource) {
MS_EXCEPTION_IF_NULL(resource);
auto manager = resource->manager();
MS_EXCEPTION_IF_NULL(manager);
@ -749,7 +749,7 @@ bool AbstractSpecializeAction(const ResourcePtr &resource) {
MS_LOG(EXCEPTION) << "AbstractSpecialize error";
}
SetLoadFlag(resource);
SetMindIRLoadFlag(resource);
// Abstract analyze
auto engine = resource->engine();

View File

@ -935,11 +935,10 @@ AbstractBasePtr MakePyInferRes2Abstract(const py::object &output) {
} // namespace
py::tuple PreparePyInputs(const AbstractBasePtrList &args) {
// The monad parameter is defined at the end of the parameter and needs to be ignored
std::size_t size_args = args.size() - GetAbstractMonadNum(args);
py::tuple py_args(size_args);
for (size_t i = 0; i < size_args; i++) {
auto arg_i = (args)[i];
py_args[i] = ConvertAbstractToPython(arg_i);
std::size_t args_size = args.size() - GetAbstractMonadNum(args);
py::tuple py_args(args_size);
for (size_t i = 0; i < args_size; i++) {
py_args[i] = ConvertAbstractToPython(args[i]);
}
return py_args;
}

View File

@ -170,6 +170,7 @@ class IrExportBuilder {
private:
bool SetAbstractFuncToAttributeProto(const abstract::AbstractBasePtr &abstract,
mind_ir::AttributeProto *const attr_proto);
bool ExportWeight(const ParameterPtr &param, const std::string &param_name, mind_ir::GraphProto *const graph_proto);
std::string GetPrimitiveUniqueName(const PrimitivePtr &primitive_ptr);
bool BuildPrimitives();
@ -443,52 +444,54 @@ bool IrExportBuilder::BuildFuncGraphAttrs(const FuncGraphPtr &func_graph, mind_i
return true;
}
// Serialize one weight parameter (a Parameter holding a default value) into
// the graph proto: an AbstractMapTensor goes into map_parameter, an
// AbstractTensor into parameter; any other abstract is rejected.
// Returns false (after logging) when the conversion fails.
bool IrExportBuilder::ExportWeight(const ParameterPtr &param, const std::string &param_name,
                                   mind_ir::GraphProto *const graph_proto) {
  MS_EXCEPTION_IF_NULL(param);
  MS_EXCEPTION_IF_NULL(graph_proto);
  // Bug fix: the original log string opened a quote ("Parameter: '") that was
  // never closed, producing an unbalanced message.
  MS_LOG(DEBUG) << "Parameter: " << param->DebugString();
  auto param_abs = param->abstract();
  MS_EXCEPTION_IF_NULL(param_abs);
  if (param_abs->isa<abstract::AbstractMapTensor>()) {
    auto *map_parameter_proto = graph_proto->add_map_parameter();
    if (!ConvertMapParameterToMapTensorProto(param, map_parameter_proto)) {
      MS_LOG(ERROR) << "Convert MapParameter " << param->ToString() << " to MapTensorProto failed.";
      return false;
    }
    return true;
  }
  if (param_abs->isa<abstract::AbstractTensor>()) {
    mind_ir::TensorProto *parameter_proto = graph_proto->add_parameter();
    parameter_proto->set_name(param_name);
    if (!SetParamToTensorProto(param, parameter_proto)) {
      MS_LOG(ERROR) << "Set parameter " << param->DebugString() << " to TensorProto failed.";
      return false;
    }
    return true;
  }
  // NOTE(review): default_param() may itself be null for an fv parameter with
  // no default value, which would crash while formatting this error — confirm
  // callers guarantee a default before relying on this path.
  MS_LOG(ERROR) << "Only support MapTensor or Tensor as default param of Parameter, got: "
                << param->default_param()->ToString();
  return false;
}
bool IrExportBuilder::BuildParameters(const FuncGraphPtr &func_graph, mind_ir::GraphProto *const graph_proto) {
MS_EXCEPTION_IF_NULL(func_graph);
MS_EXCEPTION_IF_NULL(graph_proto);
for (auto &item : func_graph->parameters()) {
auto param_size = func_graph->parameters().size();
MS_LOG(DEBUG) << "func graph parameter num:" << param_size << ", fv param num:" << func_graph->fv_param_count();
for (size_t param_counter = 0; param_counter < param_size; ++param_counter) {
auto &item = func_graph->parameters()[param_counter];
MS_EXCEPTION_IF_NULL(item);
auto param = item->cast<ParameterPtr>();
if (param == nullptr) {
MS_LOG(ERROR) << "Parameter: '" << item->ToString() << "' could not cast to parameter.";
return false;
}
std::string param_name = GetUniqueNodeName(param);
if (top_graph && param->has_default()) {
MS_LOG(DEBUG) << "Parameter: '" << item->DebugString();
if (param->abstract()->isa<abstract::AbstractMapTensor>()) {
auto *map_parameter_proto = graph_proto->add_map_parameter();
if (!ConvertMapParameterToMapTensorProto(param, map_parameter_proto)) {
MS_LOG(ERROR) << "Convert MapParameter " << param->ToString() << " to MapTensorProto failed.";
return false;
}
} else if (param->abstract()->isa<abstract::AbstractTensor>()) {
mind_ir::TensorProto *parameter_proto = graph_proto->add_parameter();
parameter_proto->set_name(param_name);
if (!SetParamToTensorProto(param, parameter_proto)) {
MS_LOG(ERROR) << "Set parameter " << param->DebugString() << " to TensorProto failed.";
return false;
}
auto tensor = param->default_param()->cast<tensor::TensorPtr>();
if (tensor != nullptr) {
parameter_proto->set_compression_type(
static_cast<mind_ir::TensorProto_CompressionType>(tensor->compression_type()));
}
auto quant_params = tensor->quant_params();
for (size_t i = 0; i < quant_params.size(); i++) {
auto quant_param_proto = parameter_proto->add_quant_params();
auto ret = SetQuantizationParamToAttrProto(quant_params[i], quant_param_proto);
if (ret != true) {
MS_LOG(ERROR) << "QuantizationParam Set Value to AttributeProto Error";
return false;
}
}
} else {
MS_LOG(ERROR) << "Only support MapTensor or Tensor as default param of Parameter, got: "
<< param->default_param()->ToString();
return false;
if (top_graph && param_counter >= param_size - func_graph->fv_param_count()) {
if (!ExportWeight(param, param_name, graph_proto)) {
MS_LOG(ERROR) << "Failed to export parameter weight:" << param->DebugString();
}
} else {
// export graph input
mind_ir::ValueInfoProto *input_proto = graph_proto->add_input();
input_proto->set_name(param_name);
if (!SetValueInfoProto(param, input_proto)) {
@ -694,7 +697,25 @@ bool IrExportBuilder::SetParamToTensorProto(const ParameterPtr &param, mind_ir::
MS_LOG(EXCEPTION) << "Parameter or TensorProto is null!";
}
MS_LOG(DEBUG) << "SetParamToTensorProto: " << param->DebugString();
return SetTensorProto(param->abstract(), tensor_proto);
if (!SetTensorProto(param->abstract(), tensor_proto)) {
MS_LOG(ERROR) << "Export Parameter to tensor proto failed.";
return false;
}
// export quant parameter info
auto tensor = param->default_param()->cast<tensor::TensorPtr>();
if (tensor != nullptr) {
tensor_proto->set_compression_type(static_cast<mind_ir::TensorProto_CompressionType>(tensor->compression_type()));
}
auto quant_params = tensor->quant_params();
for (const auto &quant_param : quant_params) {
auto quant_param_proto = tensor_proto->add_quant_params();
auto ret = SetQuantizationParamToAttrProto(quant_param, quant_param_proto);
if (ret != true) {
MS_LOG(ERROR) << "QuantizationParam Set Value to AttributeProto Error";
return false;
}
}
return true;
}
bool IrExportBuilder::ConvertMapParameterToMapTensorProto(const ParameterPtr &map_parameter,

View File

@ -29,6 +29,18 @@
#include "ops/core_ops.h"
namespace mindspore {
namespace {
// Insert a freshly created parameter immediately BEFORE the graph's fv
// parameters, preserving the "graph inputs first, fv params last" ordering
// that the MindIR exporter relies on. Returns the new parameter.
//
// Improvements over the original: the FuncGraphPtr is taken by const
// reference (no shared_ptr refcount churn), the graph is null-checked, the
// fv boundary iterator is computed once, and an out-of-range fv_param_count
// is rejected instead of underflowing params.size() - fv_count.
ParameterPtr AddObfuscatedParam(const FuncGraphPtr &func_graph) {
  MS_EXCEPTION_IF_NULL(func_graph);
  const auto &params = func_graph->parameters();
  auto fv_count = func_graph->fv_param_count();
  if (fv_count > params.size()) {
    MS_LOG(EXCEPTION) << "fv_param_count " << fv_count << " exceeds parameter count " << params.size();
  }
  auto add_param = std::make_shared<Parameter>(func_graph);
  // Boundary between real graph inputs and the trailing fv parameters.
  auto fv_begin = params.begin() + static_cast<std::ptrdiff_t>(params.size() - fv_count);
  std::vector<AnfNodePtr> new_para_list(params.begin(), fv_begin);
  (void)new_para_list.emplace_back(add_param);
  (void)new_para_list.insert(new_para_list.cend(), fv_begin, params.end());
  func_graph->set_parameters(new_para_list);
  return add_param;
}
} // namespace
using Tensor = mindspore::tensor::Tensor;
using mindspore::abstract::AbstractTensor;
using mindspore::abstract::AbstractTensorPtr;
@ -299,10 +311,10 @@ CNodePtr DynamicObfuscator::PasswordModeControl(FuncGraphPtr func_graph) {
tensor::TensorPtr y_tensor = std::make_shared<Tensor>(mindspore::kNumberTypeInt32, y_shape);
if (!has_build_appended_input) {
MS_LOG(INFO) << "Build parameter y_password and y_append.";
auto y = func_graph->add_parameter();
auto y = AddObfuscatedParam(func_graph);
y->set_name("y_password");
y->set_abstract(y_tensor->ToAbstract());
auto y_append = func_graph->add_parameter();
auto y_append = AddObfuscatedParam(func_graph);
y_append->set_name("y_append");
y_append->set_abstract(y_tensor->ToAbstract());
has_build_appended_input = true;

View File

@ -317,6 +317,26 @@ STATUS ConverterFuncGraph::Optimize(const std::shared_ptr<ConverterPara> &param,
int ConverterFuncGraph::Save(const std::shared_ptr<ConverterPara> &param, const FuncGraphPtr &func_graph, void **buff,
size_t *size) {
mindspore::lite::MindIRSerializer serializer;
auto fv_count = 0;
std::vector<AnfNodePtr> params;
std::vector<AnfNodePtr> reorder_param;
reorder_param.reserve(func_graph->parameters().size());
for (const auto &node : func_graph->parameters()) {
auto param_node = node->cast<ParameterPtr>();
if (param_node == nullptr) {
MS_LOG(ERROR) << "The parameters() in func graph should be all Parameter Node. but got " << node->DebugString();
return RET_ERROR;
}
if (param_node->has_default()) {
(void)params.emplace_back(param_node);
++fv_count;
continue;
}
(void)reorder_param.emplace_back(param_node);
}
std::copy(params.begin(), params.end(), std::back_inserter(reorder_param));
func_graph->set_parameters(reorder_param);
func_graph->set_fv_param_count(fv_count);
auto ret = serializer.Save(param, func_graph);
if (ret != RET_OK) {
MS_LOG(ERROR) << "MindIR serialize fail";

View File

@ -1534,8 +1534,8 @@ def _save_mindir_together(net_dict, model, file_name, is_encrypt, **kwargs):
param_data = net_dict[param_name].data.asnumpy().tobytes()
param_proto.raw_data = param_data
else:
logger.warning("The parameter '{}' is not belongs to any cell,the data of parameter cannot be exported."
.format(param_proto.name))
raise ValueError("The parameter '{}' is not belongs to any cell,"
"the data of parameter cannot be exported.".format(param_proto.name))
incremental = kwargs.get('incremental', False)
for map_param_proto in model.graph.map_parameter:
map_param_name = map_param_proto.name[map_param_proto.name.find(":") + 1:]
@ -1546,8 +1546,8 @@ def _save_mindir_together(net_dict, model, file_name, is_encrypt, **kwargs):
map_param_proto.value_tensor.raw_data = value_nparr.tobytes()
map_param_proto.status_tensor.raw_data = status_nparr.tobytes()
else:
logger.warning("The map_parameter '{}' is not belongs to any cell,the data of parameter cannot be exported."
.format(map_param_proto.name))
raise ValueError("The map_parameter '{}' is not belongs to any cell,"
"the data of parameter cannot be exported.".format(map_param_proto.name))
if not file_name.endswith('.mindir'):
file_name += ".mindir"
current_path = os.path.abspath(file_name)
@ -1577,8 +1577,8 @@ def _save_together(net_dict, model):
if name in net_dict.keys():
data_total += sys.getsizeof(net_dict[name].data.asnumpy().tobytes()) / 1024
else:
logger.info("The parameter '{}' is not belongs to any cell,the data of parameter cannot be exported."
.format(param_proto.name))
raise ValueError("The parameter '{}' is not belongs to any cell,"
"the data of parameter cannot be exported.".format(param_proto.name))
if data_total > TOTAL_SAVE:
return False
return True

View File

@ -14,7 +14,7 @@
# ============================================================================
import numpy as np
from mindspore.nn import Cell, GraphCell
from mindspore import ops
from mindspore import ops, nn
from mindspore import Tensor, export, load, Parameter, dtype, context
@ -73,3 +73,31 @@ def test_mindir_export_none():
export(net, input_tensor, file_name="none_net", file_format='MINDIR')
graph = load("none_net.mindir")
assert graph is not None
def test_mindir_export_parameter_as_tensor():
    """
    Feature: Test MindIR Export model
    Description: test mindir export parameter as construct input
    Expectation: No exception.
    """
    input_np_x = np.random.randn(3).astype(np.float32)
    # Top-level Parameter passed as a network INPUT (not a cell weight) —
    # this is the case the parameter-export fix targets.
    input_np_x_param = Parameter(input_np_x)

    class Net(Cell):
        def __init__(self):
            super(Net, self).__init__()
            self.relu = nn.ReLU()
            # Weight built from the same underlying data as the input parameter.
            self.x = Parameter(Tensor(input_np_x))

        def construct(self, x):
            x = x + x
            x = x * self.x
            return x

    context.set_context(mode=context.GRAPH_MODE)
    net = Net()
    # Reference result from the live network.
    out_net = net(input_np_x_param)
    # Export to MindIR, reload through GraphCell, and check the reloaded
    # graph reproduces the original numerics within tolerance.
    export(net, input_np_x_param, file_name="test", file_format="MINDIR")
    graph = load("test.mindir")
    net_mindir = nn.GraphCell(graph)
    result_mindir = net_mindir(input_np_x_param)
    assert np.allclose(result_mindir.asnumpy(), out_net.asnumpy(), 0.0001, 0.001, equal_nan=True)