!25822 [MS][LITE] check model inputs and outputs

Merge pull request !25822 from mengyuanli/optimize_init_time
i-robot 2021-11-04 01:24:13 +00:00 committed by Gitee
commit 5b668b21bb
14 changed files with 63 additions and 47 deletions

View File

@@ -335,7 +335,7 @@ std::vector<MSTensor> ModelImpl::GetOutputs() {
}
auto outputs = session_->GetOutputs();
if (outputs.empty()) {
-MS_LOG(ERROR) << "The outputs of model is null.";
+MS_LOG(ERROR) << "The output tensor name of this model is null.";
return empty;
}
if (names.size() != outputs.size()) {

View File

@@ -223,7 +223,13 @@ int LiteModel::NodeVerify() const {
MS_LOG(ERROR) << "Index of node->output_indices_ is beyond size.";
return RET_ERROR;
}
+if (std::any_of(node->output_indices_.begin(), node->output_indices_.end(), [&](const uint32_t &idx) {
+return this->all_tensors_[idx]->nodeType() == NodeType_ValueNode &&
+this->all_tensors_[idx]->data() != nullptr;
+})) {
+MS_LOG(ERROR) << "node output tensor node type is ValueNode, node name: " << node->name_;
+return RET_ERROR;
+}
if (IsPartialNode(node->primitive_, schema_version_)) {
auto subgraph_index = GetPartialGraphIndex(node->primitive_, schema_version_);
if (static_cast<uint32_t>(subgraph_index) >= subgraph_size) {
@@ -279,6 +285,16 @@ bool LiteModel::ModelVerify() const {
return false;
}
+if (this->input_indices_.empty()) {
+MS_LOG(ERROR) << "Model does not have inputs.";
+return false;
+}
+if (this->output_indices_.empty()) {
+MS_LOG(ERROR) << "Model does not have outputs.";
+return false;
+}
auto all_tensors_size = this->all_tensors_.size();
for (auto input_index : this->input_indices_) {
if (input_index >= all_tensors_size) {
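
Illustrative sketch (not part of this diff, helper name and header path are assumptions): using the public mindspore::lite::Model::Import(const char *, size_t) entry point, the verification added above should reject a flatbuffer whose MetaGraph has an empty inputIndex or outputIndex at load time instead of letting it fail later during inference.

#include <iostream>
#include "include/model.h"  // MindSpore Lite header, path assumed

int LoadModelBuffer(const char *model_buf, size_t size) {
  // Import parses the flatbuffer; the ModelVerify()/NodeVerify() checks above
  // are expected to run during this step.
  auto *model = mindspore::lite::Model::Import(model_buf, size);
  if (model == nullptr) {
    // With this change, a model that declares no inputs or no outputs fails here,
    // logging "Model does not have inputs." / "Model does not have outputs.".
    std::cerr << "model import failed verification" << std::endl;
    return -1;
  }
  // ... hand the model to a session as usual, then release it ...
  delete model;
  return 0;
}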

View File

@@ -129,7 +129,7 @@ void ConstructModel(schema::MetaGraphT *meta_graph) {
/* tensors */
auto tensor0 = std::make_unique<mindspore::schema::TensorT>();
-tensor0->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor0->nodeType = mindspore::lite::NodeType_Parameter;
tensor0->format = mindspore::schema::Format_NHWC;
tensor0->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor0->dims = {1, 2, 2, 1};
@@ -137,7 +137,7 @@ void ConstructModel(schema::MetaGraphT *meta_graph) {
tensor0->name = "tensor0";
auto tensor1 = std::make_unique<mindspore::schema::TensorT>();
-tensor1->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor1->nodeType = mindspore::lite::NodeType_Parameter;
tensor1->format = mindspore::schema::Format_NHWC;
tensor1->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor1->dims = {1, 2, 2, 1};
@@ -145,7 +145,7 @@ void ConstructModel(schema::MetaGraphT *meta_graph) {
tensor1->name = "tensor1";
auto tensor2 = std::make_unique<mindspore::schema::TensorT>();
-tensor2->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor2->nodeType = mindspore::lite::NodeType_Parameter;
tensor2->format = mindspore::schema::Format_NHWC;
tensor2->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor2->dims = {1, 2, 2, 1};
@@ -153,7 +153,7 @@ void ConstructModel(schema::MetaGraphT *meta_graph) {
tensor2->name = "tensor2";
auto tensor3 = std::make_unique<mindspore::schema::TensorT>();
-tensor3->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor3->nodeType = mindspore::lite::NodeType_Parameter;
tensor3->format = mindspore::schema::Format_NHWC;
tensor3->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor3->dims = {1, 2, 2, 1};
@@ -161,7 +161,7 @@ void ConstructModel(schema::MetaGraphT *meta_graph) {
tensor3->name = "tensor3";
auto tensor4 = std::make_unique<mindspore::schema::TensorT>();
-tensor4->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor4->nodeType = mindspore::lite::NodeType_Parameter;
tensor4->format = mindspore::schema::Format_NHWC;
tensor4->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor4->dims = {1, 2, 2, 1};
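
Aside (not part of the diff): the handcrafted test tensors switch from NodeType_ValueNode to NodeType_Parameter here and in the files that follow, consistent with the stricter NodeVerify() above, which rejects a node output tensor typed as ValueNode that carries data. A minimal sketch of the distinction, with hypothetical names:

// Non-constant tensor (graph input / op output): Parameter, no data attached.
auto activation = std::make_unique<mindspore::schema::TensorT>();
activation->nodeType = mindspore::lite::NodeType_Parameter;
activation->format = mindspore::schema::Format_NHWC;
activation->dataType = mindspore::TypeId::kNumberTypeFloat32;
activation->dims = {1, 2, 2, 1};

// Constant tensor (e.g. a weight): ValueNode, keeps its data buffer.
auto weight = std::make_unique<mindspore::schema::TensorT>();
weight->nodeType = mindspore::lite::NodeType_ValueNode;
weight->format = mindspore::schema::Format_NHWC;
weight->dataType = mindspore::TypeId::kNumberTypeFloat32;
weight->dims = {1};
weight->data.resize(sizeof(float));  // constant payload filled elsewhere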

View File

@@ -83,7 +83,7 @@ void CreateMultyModel1(mindspore::schema::MetaGraphT *meta_graph) {
/* tensors */
auto tensor0 = std::make_unique<mindspore::schema::TensorT>();
-tensor0->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor0->nodeType = mindspore::lite::NodeType_Parameter;
tensor0->format = mindspore::schema::Format_NHWC;
tensor0->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor0->dims = {1, 1, 1, 1};
@@ -91,7 +91,7 @@ void CreateMultyModel1(mindspore::schema::MetaGraphT *meta_graph) {
tensor0->name = "tensor0";
auto tensor1 = std::make_unique<mindspore::schema::TensorT>();
-tensor1->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor1->nodeType = mindspore::lite::NodeType_Parameter;
tensor1->format = mindspore::schema::Format_NHWC;
tensor1->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor1->dims = {1, 1, 1, 1};
@@ -99,7 +99,7 @@ void CreateMultyModel1(mindspore::schema::MetaGraphT *meta_graph) {
tensor1->name = "tensor1";
auto tensor2 = std::make_unique<mindspore::schema::TensorT>();
-tensor2->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor2->nodeType = mindspore::lite::NodeType_Parameter;
tensor2->format = mindspore::schema::Format_NHWC;
tensor2->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor2->dims = {1, 1, 1, 1};
@@ -107,7 +107,7 @@ void CreateMultyModel1(mindspore::schema::MetaGraphT *meta_graph) {
tensor2->name = "tensor2";
auto tensor3 = std::make_unique<mindspore::schema::TensorT>();
-tensor3->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor3->nodeType = mindspore::lite::NodeType_Parameter;
tensor3->format = mindspore::schema::Format_NHWC;
tensor3->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor3->dims = {1, 1, 1, 1};
@@ -115,7 +115,7 @@ void CreateMultyModel1(mindspore::schema::MetaGraphT *meta_graph) {
tensor3->name = "tensor3";
auto tensor4 = std::make_unique<mindspore::schema::TensorT>();
-tensor4->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor4->nodeType = mindspore::lite::NodeType_Parameter;
tensor4->format = mindspore::schema::Format_NHWC;
tensor4->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor4->dims = {1, 1, 1, 1};
@@ -187,7 +187,7 @@ void CreateMultyModel2(mindspore::schema::MetaGraphT *meta_graph) {
/* tensors */
auto tensor0 = std::make_unique<mindspore::schema::TensorT>();
-tensor0->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor0->nodeType = mindspore::lite::NodeType_Parameter;
tensor0->format = mindspore::schema::Format_NHWC;
tensor0->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor0->dims = {1, 2, 2, 1};
@@ -195,7 +195,7 @@ void CreateMultyModel2(mindspore::schema::MetaGraphT *meta_graph) {
tensor0->name = "tensor0";
auto tensor1 = std::make_unique<mindspore::schema::TensorT>();
-tensor1->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor1->nodeType = mindspore::lite::NodeType_Parameter;
tensor1->format = mindspore::schema::Format_NHWC;
tensor1->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor1->dims = {1, 2, 2, 1};
@@ -203,7 +203,7 @@ void CreateMultyModel2(mindspore::schema::MetaGraphT *meta_graph) {
tensor1->name = "tensor1";
auto tensor2 = std::make_unique<mindspore::schema::TensorT>();
-tensor2->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor2->nodeType = mindspore::lite::NodeType_Parameter;
tensor2->format = mindspore::schema::Format_NHWC;
tensor2->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor2->dims = {1, 2, 2, 1};

View File

@@ -49,12 +49,12 @@ TEST_F(SubGraphTest, RecursiveSubGraphTest) {
add_0->primitive->value.value = add_0_prim;
add_0->name = "Add0";
auto tensor_0 = std::make_unique<schema::TensorT>();
-tensor_0->nodeType = lite::NodeType_ValueNode;
+tensor_0->nodeType = lite::NodeType_Parameter;
tensor_0->format = schema::Format_NHWC;
tensor_0->dataType = TypeId::kNumberTypeFloat32;
tensor_0->dims = {1};
auto tensor_1 = std::make_unique<schema::TensorT>();
-tensor_1->nodeType = lite::NodeType_ValueNode;
+tensor_1->nodeType = lite::NodeType_Parameter;
tensor_1->format = schema::Format_NHWC;
tensor_1->dataType = TypeId::kNumberTypeFloat32;
tensor_1->dims = {1};

View File

@@ -55,7 +55,7 @@ TEST_F(InferTest, TestConvNode) {
meta_graph->outputIndex = {2};
auto input0 = std::make_unique<schema::TensorT>();
-input0->nodeType = lite::NodeType_ValueNode;
+input0->nodeType = lite::NodeType_Parameter;
input0->format = schema::Format_NHWC;
input0->dataType = TypeId::kNumberTypeFloat32;
input0->dims = {1, 28, 28, 3};
@@ -168,7 +168,7 @@ TEST_F(InferTest, TestAddNode) {
meta_graph->outputIndex = {2};
auto input0 = std::make_unique<schema::TensorT>();
-input0->nodeType = lite::NodeType_ValueNode;
+input0->nodeType = lite::NodeType_Parameter;
input0->format = schema::Format_NHWC;
input0->dataType = TypeId::kNumberTypeFloat32;
input0->dims = {1, 28, 28, 3};

View File

@@ -161,7 +161,7 @@ TEST_F(TestRegistryCustomOp, TestCustomAdd) {
meta_graph->outputIndex = {2};
auto input0 = std::make_unique<schema::TensorT>();
-input0->nodeType = lite::NodeType_ValueNode;
+input0->nodeType = lite::NodeType_Parameter;
input0->format = schema::Format_NHWC;
input0->dataType = TypeId::kNumberTypeFloat32;
input0->dims = {1, 28, 28, 3};

View File

@@ -449,7 +449,7 @@ TEST_F(TestGPURegistryCustomOp, TestGPUCustomAdd) {
meta_graph->outputIndex = {2};
auto input0 = std::make_unique<schema::TensorT>();
-input0->nodeType = lite::NodeType_ValueNode;
+input0->nodeType = lite::NodeType_Parameter;
input0->format = schema::Format_NHWC;
input0->dataType = TypeId::kNumberTypeFloat32;
input0->dims = {1, 28, 28, 3};

View File

@@ -124,7 +124,7 @@ TEST_F(TestRegistry, TestAdd) {
meta_graph->outputIndex = {2};
auto input0 = std::make_unique<schema::TensorT>();
-input0->nodeType = lite::NodeType_ValueNode;
+input0->nodeType = lite::NodeType_Parameter;
input0->format = schema::Format_NHWC;
input0->dataType = TypeId::kNumberTypeFloat32;
input0->dims = {1, 28, 28, 3};

View File

@@ -96,49 +96,49 @@ TEST_F(SchedulerTest, TestConstructSubGraphsTwoBranch) {
concat->name = "concat";
auto tensor0 = std::make_unique<mindspore::schema::TensorT>();
-tensor0->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor0->nodeType = mindspore::lite::NodeType_Parameter;
tensor0->format = mindspore::schema::Format_NHWC;
tensor0->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor0->dims = {1, 16, 16, 4};
tensor0->offset = -1;
auto tensor1 = std::make_unique<mindspore::schema::TensorT>();
-tensor1->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor1->nodeType = mindspore::lite::NodeType_Parameter;
tensor1->format = mindspore::schema::Format_NHWC;
tensor1->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor1->dims = {1, 16, 16, 2};
tensor1->offset = -1;
auto tensor2 = std::make_unique<mindspore::schema::TensorT>();
-tensor2->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor2->nodeType = mindspore::lite::NodeType_Parameter;
tensor2->format = mindspore::schema::Format_NHWC;
tensor2->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor2->dims = {1, 16, 16, 2};
tensor2->offset = -1;
auto tensor3 = std::make_unique<mindspore::schema::TensorT>();
-tensor3->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor3->nodeType = mindspore::lite::NodeType_Parameter;
tensor3->format = mindspore::schema::Format_NHWC;
tensor3->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor3->dims = {1, 16, 16, 2};
tensor3->offset = -1;
auto tensor4 = std::make_unique<mindspore::schema::TensorT>();
-tensor4->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor4->nodeType = mindspore::lite::NodeType_Parameter;
tensor4->format = mindspore::schema::Format_NHWC;
tensor4->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor4->dims = {1, 16, 16, 2};
tensor4->offset = -1;
auto tensor5 = std::make_unique<mindspore::schema::TensorT>();
-tensor5->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor5->nodeType = mindspore::lite::NodeType_Parameter;
tensor5->format = mindspore::schema::Format_NHWC;
tensor5->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor5->dims = {1, 16, 16, 2};
tensor5->offset = -1;
auto tensor6 = std::make_unique<mindspore::schema::TensorT>();
-tensor6->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor6->nodeType = mindspore::lite::NodeType_Parameter;
tensor6->format = mindspore::schema::Format_NHWC;
tensor6->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor6->dims = {1, 16, 16, 2};
tensor6->offset = -1;
auto tensor7 = std::make_unique<mindspore::schema::TensorT>();
-tensor7->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor7->nodeType = mindspore::lite::NodeType_Parameter;
tensor7->format = mindspore::schema::Format_NHWC;
tensor7->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor7->dims = {1, 16, 16, 4};
@@ -257,67 +257,67 @@ TEST_F(SchedulerTest, TestConstructSubGraphsThreeBranch) {
concat->name = "concat";
auto tensor0 = std::make_unique<mindspore::schema::TensorT>();
-tensor0->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor0->nodeType = mindspore::lite::NodeType_Parameter;
tensor0->format = mindspore::schema::Format_NHWC;
tensor0->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor0->dims = {1, 16, 16, 3};
tensor0->offset = -1;
auto tensor1 = std::make_unique<mindspore::schema::TensorT>();
-tensor1->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor1->nodeType = mindspore::lite::NodeType_Parameter;
tensor1->format = mindspore::schema::Format_NHWC;
tensor1->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor1->dims = {1, 16, 16, 1};
tensor1->offset = -1;
auto tensor2 = std::make_unique<mindspore::schema::TensorT>();
-tensor2->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor2->nodeType = mindspore::lite::NodeType_Parameter;
tensor2->format = mindspore::schema::Format_NHWC;
tensor2->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor2->dims = {1, 16, 16, 1};
tensor2->offset = -1;
auto tensor3 = std::make_unique<mindspore::schema::TensorT>();
-tensor3->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor3->nodeType = mindspore::lite::NodeType_Parameter;
tensor3->format = mindspore::schema::Format_NHWC;
tensor3->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor3->dims = {1, 16, 16, 1};
tensor3->offset = -1;
auto tensor4 = std::make_unique<mindspore::schema::TensorT>();
-tensor4->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor4->nodeType = mindspore::lite::NodeType_Parameter;
tensor4->format = mindspore::schema::Format_NHWC;
tensor4->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor4->dims = {1, 16, 16, 1};
tensor4->offset = -1;
auto tensor5 = std::make_unique<mindspore::schema::TensorT>();
-tensor5->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor5->nodeType = mindspore::lite::NodeType_Parameter;
tensor5->format = mindspore::schema::Format_NHWC;
tensor5->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor5->dims = {1, 16, 16, 1};
tensor5->offset = -1;
auto tensor6 = std::make_unique<mindspore::schema::TensorT>();
-tensor6->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor6->nodeType = mindspore::lite::NodeType_Parameter;
tensor6->format = mindspore::schema::Format_NHWC;
tensor6->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor6->dims = {1, 16, 16, 1};
tensor6->offset = -1;
auto tensor7 = std::make_unique<mindspore::schema::TensorT>();
-tensor7->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor7->nodeType = mindspore::lite::NodeType_Parameter;
tensor7->format = mindspore::schema::Format_NHWC;
tensor7->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor7->dims = {1, 16, 16, 1};
tensor7->offset = -1;
auto tensor8 = std::make_unique<mindspore::schema::TensorT>();
-tensor8->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor8->nodeType = mindspore::lite::NodeType_Parameter;
tensor8->format = mindspore::schema::Format_NHWC;
tensor8->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor8->dims = {1, 16, 16, 1};
tensor8->offset = -1;
auto tensor9 = std::make_unique<mindspore::schema::TensorT>();
-tensor9->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor9->nodeType = mindspore::lite::NodeType_Parameter;
tensor9->format = mindspore::schema::Format_NHWC;
tensor9->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor9->dims = {1, 16, 16, 1};
tensor9->offset = -1;
auto tensor10 = std::make_unique<mindspore::schema::TensorT>();
-tensor10->nodeType = mindspore::lite::NodeType_ValueNode;
+tensor10->nodeType = mindspore::lite::NodeType_Parameter;
tensor10->format = mindspore::schema::Format_NHWC;
tensor10->dataType = mindspore::TypeId::kNumberTypeFloat32;
tensor10->dims = {1, 16, 16, 3};

View File

@@ -99,7 +99,7 @@ MetaGraphTptr BuildGraph(schema::PrimitiveType conv_type, schema::ActivationType
// input 0: data
auto input0 = std::make_unique<schema::TensorT>();
-input0->nodeType = lite::NodeType_ValueNode;
+input0->nodeType = lite::NodeType_Parameter;
input0->format = schema::Format_NHWC;
input0->dataType = TypeId::kNumberTypeFloat32;
input0->dims = {1, 5, 5, 3};

View File

@@ -99,7 +99,7 @@ MetaGraphTptr BuildGraph(schema::PrimitiveType conv_type, schema::PrimitiveType
// input 0: data
auto input0 = std::make_unique<schema::TensorT>();
-input0->nodeType = lite::NodeType_ValueNode;
+input0->nodeType = lite::NodeType_Parameter;
input0->format = schema::Format_NHWC;
input0->dataType = TypeId::kNumberTypeFloat32;
input0->dims = {1, 5, 5, 3};

View File

@@ -97,7 +97,7 @@ MetaGraphTptr BuildCaffeGraph(schema::PrimitiveType conv_type) {
// input 0: data
auto input0 = std::make_unique<schema::TensorT>();
-input0->nodeType = lite::NodeType_ValueNode;
+input0->nodeType = lite::NodeType_Parameter;
input0->format = schema::Format_NHWC;
input0->dataType = TypeId::kNumberTypeFloat32;
input0->dims = {1, 5, 5, 3};
@@ -178,7 +178,7 @@ MetaGraphTptr BuildTFGraph(schema::PrimitiveType conv_type) {
// input 0: data
auto input0 = std::make_unique<schema::TensorT>();
-input0->nodeType = lite::NodeType_ValueNode;
+input0->nodeType = lite::NodeType_Parameter;
input0->format = schema::Format_NHWC;
input0->dataType = TypeId::kNumberTypeFloat32;
input0->dims = {1, 5, 5, 3};

View File

@@ -114,7 +114,7 @@ MetaGraphTptr BuildGraph(schema::PrimitiveType conv_type, bool conv_with_bias) {
// input 0: data
auto input0 = std::make_unique<schema::TensorT>();
-input0->nodeType = lite::NodeType_ValueNode;
+input0->nodeType = lite::NodeType_Parameter;
input0->format = schema::Format_NHWC;
input0->dataType = TypeId::kNumberTypeFloat32;
input0->dims = {1, 5, 5, 3};
@@ -133,7 +133,7 @@ MetaGraphTptr BuildGraph(schema::PrimitiveType conv_type, bool conv_with_bias) {
if (conv_with_bias) {
// input 00: bias
auto input00 = std::make_unique<schema::TensorT>();
-input00->nodeType = lite::NodeType_ValueNode;
+input00->nodeType = lite::NodeType_Parameter;
input00->format = schema::Format_NHWC;
input00->dataType = TypeId::kNumberTypeFloat32;
input00->dims = {1, 5, 5, 3};