Add compatibility

This commit is contained in:
xuanyue 2020-11-16 10:40:52 +08:00
parent 4530532f4a
commit 2be2cb3a96
12 changed files with 1459 additions and 45 deletions

View File

@ -76,7 +76,8 @@ if (PLATFORM_ARM64)
install(FILES ${TOP_DIR}/mindspore/lite/build/src/libmindspore-lite.so DESTINATION ${LIB_DIR} COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/mindspore/lite/build/src/libmindspore-lite.a DESTINATION ${LIB_DIR} COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/mindspore/core/ir/dtype/type_id.h DESTINATION ${INC_DIR}/ir/dtype COMPONENT ${COMPONENT_NAME})
# Export only the generated model/ops schema headers. The previous broad
# DIRECTORY install is removed: its "*.h" pattern matched the same files
# (a duplicate install) and would also ship internal compatibility headers
# such as the v0 schema that are not part of the public package.
install(FILES ${TOP_DIR}/mindspore/lite/build/schema/model_generated.h DESTINATION ${INC_DIR}/schema COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/mindspore/lite/build/schema/ops_generated.h DESTINATION ${INC_DIR}/schema COMPONENT ${COMPONENT_NAME})
install(DIRECTORY ${flatbuffers_INC} DESTINATION ${FLATBF_DIR} COMPONENT ${COMPONENT_NAME})
if (ENABLE_TOOLS)
install(TARGETS benchmark RUNTIME DESTINATION ${MAIN_DIR}-${COMPONENT_NAME}/benchmark COMPONENT ${COMPONENT_NAME})
@ -90,7 +91,8 @@ elseif (PLATFORM_ARM32)
install(FILES ${TOP_DIR}/mindspore/lite/build/src/libmindspore-lite.so DESTINATION ${LIB_DIR} COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/mindspore/lite/build/src/libmindspore-lite.a DESTINATION ${LIB_DIR} COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/mindspore/core/ir/dtype/type_id.h DESTINATION ${INC_DIR}/ir/dtype COMPONENT ${COMPONENT_NAME})
# Export only the generated model/ops schema headers. The previous broad
# DIRECTORY install is removed: its "*.h" pattern matched the same files
# (a duplicate install) and would also ship internal compatibility headers
# such as the v0 schema that are not part of the public package.
install(FILES ${TOP_DIR}/mindspore/lite/build/schema/model_generated.h DESTINATION ${INC_DIR}/schema COMPONENT ${COMPONENT_NAME})
install(FILES ${TOP_DIR}/mindspore/lite/build/schema/ops_generated.h DESTINATION ${INC_DIR}/schema COMPONENT ${COMPONENT_NAME})
install(DIRECTORY ${flatbuffers_INC} DESTINATION ${FLATBF_DIR} COMPONENT ${COMPONENT_NAME})
if (ENABLE_TOOLS)
install(TARGETS benchmark RUNTIME DESTINATION ${MAIN_DIR}-${COMPONENT_NAME}/benchmark COMPONENT ${COMPONENT_NAME})

View File

@ -93,10 +93,7 @@ include(${TOP_DIR}/cmake/dependency_utils.cmake)
include(${TOP_DIR}/cmake/dependency_securec.cmake)
include(${TOP_DIR}/cmake/external_libs/flatbuffers.cmake)
# Collect every schema file under schema/. The previous hard-coded
# set(FBS_FILES model.fbs ops.fbs) was dead code: file(GLOB ...) on the
# same variable immediately overwrote it, so keep only the glob, which
# also picks up the new *_v0.fbs compatibility schemas automatically.
file(GLOB FBS_FILES ${CMAKE_CURRENT_SOURCE_DIR}/schema/*.fbs)
ms_build_flatbuffers_lite(FBS_FILES ${CMAKE_CURRENT_SOURCE_DIR}/schema/ fbs_src ${CMAKE_BINARY_DIR}/schema "")
ms_build_flatbuffers_lite(FBS_FILES ${CMAKE_CURRENT_SOURCE_DIR}/schema/ fbs_inner_src ${CMAKE_BINARY_DIR}/schema/inner "inner")

View File

@ -18,6 +18,11 @@ include "ops.fbs";
namespace mindspore.schema;
// This corresponds to the version.
file_identifier "MSL1";
// File extension of any written files.
file_extension "ms";
enum NodeType: int {
ValueNode, // const
Parameter, // var

View File

@ -0,0 +1,282 @@
/**
* Copyright 2019 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
include "ops_v0.fbs";
namespace mindspore.schema.v0;
// Role of a node in the legacy (v0) graph.
enum NodeType: int {
ValueNode, // const
Parameter, // var
CNode // op
}
// Quantization parameters attached to a Tensor (legacy v0 layout).
table QuantParam {
scale: double;
zeroPoint: int;
min: double = 0;
max: double = 0;
narrowRange: bool = true;
numBits: int = 8;
inited: bool = false; // false until the fields above have been populated -- TODO confirm against writer
varCorr: float = 1;
meanCorr: float = 0;
dstDtype: int = 32;
}
// A tensor in the v0 schema: type/shape metadata, optional raw bytes,
// and per-tensor quantization information.
table Tensor {
nodeType: NodeType;
// data type
dataType: int;
// shape
dims: [int];
format: Format; // Format comes from the included ops_v0.fbs
refCount: int;
offset: int;
data: [ubyte]; // raw byte contents of the tensor
quantParams: [QuantParam];
quantClusters: [float];
}
// Every operator kind understood by the v0 schema; a Primitive holds
// exactly one of these. NOTE(review): union member order is part of the
// serialized format — never reorder or remove entries, only append.
union PrimitiveType {
Concat,
SoftMax,
Activation,
Conv2D,
FusedBatchNorm,
BatchNorm,
BiasAdd,
Pooling,
ROIPooling,
DepthwiseConv2D,
DeDepthwiseConv2D,
Resize,
DetectionPostProcess,
FullConnection,
Mean,
DeConv2D,
Scale,
Reshape,
Eltwise,
NetOutput,
Add,
Sub,
MatMul,
StridedSlice,
Power,
Slice,
Stack,
Mul,
RealDiv,
Pad,
Maximum,
Minimum,
PReLU,
LeakyReLU,
ArgMax,
ArgMin,
Exp,
Crop,
Range,
Rsqrt,
ExpandDims,
Tile,
Cast,
Shape,
Nchw2Nhwc,
Nhwc2Nchw,
QuantDTypeCast,
Split,
Permute,
FakeQuantWithMinMaxVars,
Equal,
Less,
Greater,
NotEqual,
LessEqual,
GreaterEqual,
Min,
Floor,
Abs,
Neg,
Cos,
Sin,
Sqrt,
Square,
Constant,
Log,
Tan,
Atan,
Asin,
Clip,
Transpose,
Squeeze,
Unsqueeze,
Upsample,
Dropout,
Broadcast,
BroadcastTo,
Lrn,
ZerosLike,
TopK,
SpaceToDepth,
SpaceToBatch,
SparseToDense,
ReverseSequence,
Rank,
Gather,
GatherNd,
Fill,
Elu,
DepthToSpace,
BatchToSpace,
AddN,
Ceil,
EmbeddingLookup,
EmbeddingLookupSparse,
FloorDiv,
FloorMod,
L2Norm,
LocalResponseNormalization,
MatrixDiag,
Reduce,
Reverse,
Round,
Select,
Scatter,
ScatterND,
ConstantOfShape,
Unique,
Unstack,
LogicalAnd,
LogicalOr,
LogicalXor,
LogicalNot,
OnnxInt8Quantize,
OnnxInt8Dequantize,
FakeQuantWithMinMax,
FakeQuantWithMinMaxPerChannel,
BatchNormFold,
MulFold,
AddFold,
SquaredDifference,
Flatten,
FlattenGrad,
TupleGetItem,
Div,
Where,
OneHot,
Lstm,
Conv2DGradFilter,
Conv2DGradInput,
PoolingGrad,
BNGrad,
Assign,
ApplyMomentum,
BiasGrad,
SoftmaxCrossEntropy,
AddGrad,
SubGrad,
MulGrad,
DivGrad,
PowerGrad,
ActivationGrad,
PriorBox,
SpaceToBatchND,
Depend,
Return,
MakeTuple,
ToFormat,
Proposal,
Custom,
BlackBox,
NegGrad,
LogGrad,
BatchToSpaceND,
LshProjection,
HashtableLookup,
SkipGram,
DeConv2DGradFilter,
CustomPredict,
CustomNormalize,
CustomExtractFeatures,
AudioSpectrogram,
Mfcc,
Rfft,
FftReal,
FftImag,
Sgd,
Adam,
GroupConv2DGradInput,
Loop,
NonMaxSuppression,
InstanceNorm,
Identity,
LayerNorm,
While,
ControlDepend,
UnsortedSegmentSum,
AssignAdd,
OnesLike,
BinaryCrossEntropyGrad,
BinaryCrossEntropy,
LpNormalization,
DropoutGrad,
MaximumGrad,
MinimumGrad
}
// How (if at all) the model was quantized.
enum QuantType: int {
QUANT_NONE,
AwareTraining,
WeightQuant,
PostTraining
}
// Wrapper table carrying a single operator definition.
table Primitive {
value: PrimitiveType;
}
// An operator node: its primitive plus index lists for inputs/outputs
// (presumably indices into MetaGraph.allTensors -- verify against reader).
table CNode {
name: string;
nodeType: NodeType = CNode;
primitive: Primitive;
inputIndex: [uint];
outputIndex: [uint];
quantType: QuantType = QUANT_NONE;
}
// A graph partition described purely by index lists into the owning
// MetaGraph's nodes/tensors.
table SubGraph {
name:string;
inputIndices: [uint];
outputIndices: [uint];
nodeIndices: [uint];
tensorIndices: [uint];
}
// Root of a v0 model file: graph-level metadata, all nodes and tensors,
// and an optional subgraph partitioning.
table MetaGraph {
name: string;
version: string;
fmkType: int; // 0:tf,1:caffe
inputIndex: [uint];
outputIndex: [uint];
mempoolSize: uint;
nodes: [CNode];
allTensors: [Tensor]; // weight + input + output
subGraph : [SubGraph];
}
root_type MetaGraph;

File diff suppressed because it is too large. Load Diff

View File

@ -32,7 +32,7 @@ enum CHWK_SHAPE { CHWK_C = 0, CHWK_H = 1, CHWK_W = 2, CHWK_K = 3 };
enum KHWC_SHAPE { KHWC_K = 0, KHWC_H = 1, KHWC_W = 2, KHWC_C = 3 };
enum CHW_SHAPE { CHW_C = 0, CHW_H = 1, CHW_W = 2 };
enum HWC_SHAPE { HWC_H = 0, HWC_W = 1, HWC_C = 2 };
// Flatbuffer schema versions the runtime can load; SCHEMA_INVALID marks a
// buffer that verifies against neither schema. The stale single-value
// definition (enum SCHEMA_VERSION { SCHEMA_CUR = 0 };) is removed — two
// definitions of the same enum in one scope cannot compile.
enum SCHEMA_VERSION { SCHEMA_INVALID = -1, SCHEMA_CUR = 0, SCHEMA_V0 = 1 };
static constexpr int kNCHWDimNumber = 4;
static constexpr int kNHWCDimNumber = 4;

View File

@ -51,14 +51,18 @@ int ConvertSubGraph(const schema::SubGraph &sub_graph, Model *model) {
// Detect which schema version the buffer behind `verify` conforms to.
// Tries the current schema first, then the legacy v0 schema.
// Returns SCHEMA_CUR, SCHEMA_V0, or SCHEMA_INVALID when neither verifies.
int VersionVerify(flatbuffers::Verifier *verify) {
  if (schema::VerifyMetaGraphBuffer(*verify)) {
    return SCHEMA_VERSION::SCHEMA_CUR;
  } else if (schema::v0::VerifyMetaGraphBuffer(*verify)) {
    return SCHEMA_VERSION::SCHEMA_V0;
  }
  // Stale `return -1;` removed: it made the named-constant return below
  // unreachable; SCHEMA_INVALID has the same value (-1) so callers
  // comparing against -1 still work.
  return SCHEMA_VERSION::SCHEMA_INVALID;
}
// Resolve the root MetaGraph pointer of `buf` for the given schema version,
// dispatching to the matching generated GetMetaGraph(); returns nullptr for
// an unknown version.
// NOTE(review): the "Verison" spelling is kept as-is — callers use this name.
const void *GetMetaGraphByVerison(const char *buf, const int &schema_version) {
  MS_ASSERT(buf != nullptr);
  switch (schema_version) {
    case SCHEMA_VERSION::SCHEMA_CUR:
      return reinterpret_cast<const void *>(schema::GetMetaGraph(buf));
    case SCHEMA_VERSION::SCHEMA_V0:
      return reinterpret_cast<const void *>(schema::v0::GetMetaGraph(buf));
    default:
      return nullptr;
  }
}
@ -69,6 +73,9 @@ int GenerateModelByVersion(const void *meta_graph, Model *model, const int &sche
if (schema_version == SCHEMA_VERSION::SCHEMA_CUR) {
status = GenerateModel<schema::MetaGraph, schema::CNode>(*reinterpret_cast<const schema::MetaGraph *>(meta_graph),
model, schema_version);
} else if (schema_version == SCHEMA_VERSION::SCHEMA_V0) {
status = GenerateModel<schema::v0::MetaGraph, schema::v0::CNode>(
*reinterpret_cast<const schema::v0::MetaGraph *>(meta_graph), model, schema_version);
}
return status;
}

View File

@ -22,6 +22,7 @@
#include "include/model.h"
#include "include/version.h"
#include "schema/model_generated.h"
#include "schema/model_v0_generated.h"
#include "src/common/common.h"
#ifndef PRIMITIVE_WRITEABLE
#include "src/ops/ops_register.h"

View File

@ -28,7 +28,8 @@ TrainModel *TrainModel::Import(const char *model_buf, size_t size) {
return nullptr;
}
flatbuffers::Verifier verify((const uint8_t *)model_buf, size);
if (!schema::VerifyMetaGraphBuffer(verify)) {
int schema_version = VersionVerify(&verify);
if (schema_version == -1) {
MS_LOG(ERROR) << "The buffer is invalid and fail to create graph.";
return nullptr;
}
@ -45,49 +46,19 @@ TrainModel *TrainModel::Import(const char *model_buf, size_t size) {
}
memcpy(model->buf, model_buf, size);
model->buf_size_ = size;
auto meta_graph = schema::GetMetaGraph(model->buf);
const void *meta_graph = GetMetaGraphByVerison(model->buf, schema_version);
if (meta_graph == nullptr) {
delete model;
MS_LOG(ERROR) << "meta_graph is nullptr!";
delete (model);
return nullptr;
}
if (meta_graph->name() != nullptr) {
model->name_ = meta_graph->name()->c_str();
}
if (meta_graph->version() != nullptr) {
model->version_ = meta_graph->version()->c_str();
}
if (!ConvertNodes(*meta_graph, model)) {
delete model;
int status = GenerateModelByVersion(meta_graph, model, schema_version);
if (status != RET_OK) {
delete (model);
MS_LOG(ERROR) << "fail to generate model";
return nullptr;
}
if (!ConvertTensors(*meta_graph, model)) {
delete model;
return nullptr;
}
if (meta_graph->subGraph() == nullptr) {
int ret = MetaGraphMappingSubGraph(*meta_graph, model);
if (ret != RET_OK) {
MS_LOG(ERROR) << "converter old version model wrong.";
delete model;
return nullptr;
}
} else {
auto sub_graphs = meta_graph->subGraph();
auto sub_graph_size = sub_graphs->size();
for (size_t i = 0; i < sub_graph_size; i++) {
auto sub_graph = sub_graphs->GetAs<schema::SubGraph>(i);
int ret = ConvertSubGraph(*sub_graph, model);
if (ret != RET_OK) {
MS_LOG(ERROR) << "converter subgraph wrong.";
delete model;
return nullptr;
}
}
}
return model;
}

View File

@ -355,6 +355,7 @@ TEST_F(NetworkTest, tuning_layer) {
flatbuffers::FlatBufferBuilder builder(1024);
auto offset = schema::MetaGraph::Pack(builder, meta_graph.get());
builder.Finish(offset);
schema::FinishMetaGraphBuffer(builder, offset);
size_t size = builder.GetSize();
const char *content = reinterpret_cast<char *>(builder.GetBufferPointer());
std::cout << "build fb size= " << size << std::endl;

View File

@ -165,6 +165,7 @@ TEST_F(SchedulerTest, TestConstructSubGraphsTwoBranch) {
flatbuffers::FlatBufferBuilder builder(1024);
auto offset = mindspore::schema::MetaGraph::Pack(builder, meta_graph.get());
builder.Finish(offset);
mindspore::schema::FinishMetaGraphBuffer(builder, offset);
size_t size = builder.GetSize();
const char *content = reinterpret_cast<char *>(builder.GetBufferPointer());
auto model = mindspore::lite::Model::Import(content, size);
@ -349,6 +350,7 @@ TEST_F(SchedulerTest, TestConstructSubGraphsThreeBranch) {
flatbuffers::FlatBufferBuilder builder(1024);
auto offset = mindspore::schema::MetaGraph::Pack(builder, meta_graph.get());
builder.Finish(offset);
mindspore::schema::FinishMetaGraphBuffer(builder, offset);
size_t size = builder.GetSize();
const char *content = reinterpret_cast<char *>(builder.GetBufferPointer());
auto model = mindspore::lite::Model::Import(content, size);

View File

@ -27,6 +27,7 @@ int Storage::Save(const schema::MetaGraphT &graph, const std::string &outputPath
flatbuffers::FlatBufferBuilder builder(1024);
auto offset = schema::MetaGraph::Pack(builder, &graph);
builder.Finish(offset);
schema::FinishMetaGraphBuffer(builder, offset);
int size = builder.GetSize();
auto content = builder.GetBufferPointer();
if (content == nullptr) {