move primitive_type to ops

chenjianping 2021-01-28 20:39:47 +08:00
parent 83a3758de0
commit db1d4512b8
5 changed files with 175 additions and 232 deletions

@@ -511,10 +511,6 @@ gen_fbs() {
if [[ "X${diff_ops}" != "X" ]]; then
cp ${BASEPATH}/mindspore/lite/build/tools/schema_gen/ops.fbs ${BASEPATH}/mindspore/lite/schema/
fi
diff_types=$(diff ${BASEPATH}/mindspore/lite/build/tools/schema_gen/primitive_type.fbs ${BASEPATH}/mindspore/lite/schema/primitive_type.fbs || true)
if [[ "X${diff_types}" != "X" ]]; then
cp ${BASEPATH}/mindspore/lite/build/tools/schema_gen/primitive_type.fbs ${BASEPATH}/mindspore/lite/schema/
fi
fi
fi
}

@@ -14,7 +14,7 @@
* limitations under the License.
*/
include "primitive_type.fbs";
include "ops.fbs";
namespace mindspore.schema;

@@ -17,6 +17,177 @@ include "ops_types.fbs";
namespace mindspore.schema;
union PrimitiveType {
Abs,
Activation,
ActivationGrad,
Adam,
AddFusion,
AdderFusion,
AddGrad,
AddN,
All,
ApplyMomentum,
ArgMaxFusion,
ArgMinFusion,
Assert,
Assign,
AssignAdd,
AudioSpectrogram,
AvgPoolFusion,
BatchNorm,
BatchNormGrad,
BatchToSpace,
BatchToSpaceND,
BiasAdd,
BinaryCrossEntropy,
BinaryCrossEntropyGrad,
BiasGrad,
BroadcastTo,
Cast,
Ceil,
Clip,
Concat,
ControlDepend,
Conv2DBackpropFilterFusion,
Conv2DBackpropInputFusion,
Conv2DFusion,
Conv2dTransposeFusion,
Cos,
ConstantOfShape,
Crop,
CustomExtractFeatures,
CustomNormalize,
CustomPredict,
DeConv2DGradFilter,
Depend,
DepthToSpace,
DetectionPostProcess,
DivFusion,
DivGrad,
Dropout,
DropoutGrad,
Elu,
Eltwise,
Equal,
EmbeddingLookupFusion,
ExpFusion,
ExpandDims,
FakeQuantWithMinMaxVars,
FakeQuantWithMinMaxVarsPerChannel,
FftReal,
FftImag,
Flatten,
FlattenGrad,
Floor,
FloorDiv,
FloorMod,
Fill,
FullConnection,
FusedBatchNorm,
Gather,
GatherNd,
Greater,
GreaterEqual,
HashtableLookup,
InstanceNorm,
LayerNormFusion,
LeakyRelu,
Less,
LessEqual,
Log,
LogGrad,
LogicalAnd,
LogicalNot,
LogicalOr,
LpNormalization,
Lrn,
LshProjection,
LSTM,
L2NormalizeFusion,
MatMul,
Maximum,
MaximumGrad,
MaxPoolFusion,
Merge,
Mfcc,
Minimum,
MinimumGrad,
Mod,
MulFusion,
MulGrad,
Neg,
NegGrad,
NotEqual,
NonMaxSuppression,
OneHot,
OnesLike,
PadFusion,
PartialFusion,
PoolingGrad,
PowerGrad,
PowFusion,
PriorBox,
PReLUFusion,
QuantDTypeCast,
Rank,
Range,
Reciprocal,
RealDiv,
ReduceFusion,
Reshape,
Resize,
ReverseSequence,
ReverseV2,
Rfft,
ROIPooling,
Round,
Rsqrt,
ScaleFusion,
ScatterNd,
SGD,
Shape,
SigmoidCrossEntropyWithLogits,
SigmoidCrossEntropyWithLogitsGrad,
Sin,
SkipGram,
SliceFusion,
SmoothL1Loss,
SmoothL1LossGrad,
Softmax,
SoftmaxCrossEntropyWithLogits,
SpaceToBatch,
SpaceToBatchND,
SpaceToDepth,
SparseSoftmaxCrossEntropy,
SparseToDense,
Split,
Sqrt,
Squeeze,
Square,
SquaredDifference,
Stack,
StridedSlice,
SubFusion,
SubGrad,
Switch,
TensorListFromTensor,
TensorListGetItem,
TensorListReserve,
TensorListSetItem,
TensorListStack,
TileFusion,
TopKFusion,
Transpose,
Unique,
UnsortedSegmentSum,
Unsqueeze,
Unstack,
While,
Where,
ZerosLike,
}
table Abs {
}
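With the PrimitiveType union emitted into ops.fbs alongside the op tables, a single flatc pass over ops.fbs now covers both. A minimal sketch of touching the union from flatc's standard C++ codegen; the generated header name (ops_generated.h) and the EnumNamePrimitiveType helper follow flatc conventions and are assumptions, not part of this commit:

#include <iostream>

#include "ops_generated.h"  // assumed flatc --cpp output for ops.fbs

int main() {
  // flatc generates one enum value per union member plus an EnumName helper.
  auto type = mindspore::schema::PrimitiveType_AddFusion;
  std::cout << mindspore::schema::EnumNamePrimitiveType(type) << std::endl;
  return 0;
}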

@@ -1,190 +0,0 @@
/**
*
* Copyright 2021 Huawei Technologies Co., Ltd
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
include "ops.fbs";
namespace mindspore.schema;
union PrimitiveType {
Abs,
Activation,
ActivationGrad,
Adam,
AddFusion,
AdderFusion,
AddGrad,
AddN,
All,
ApplyMomentum,
ArgMaxFusion,
ArgMinFusion,
Assert,
Assign,
AssignAdd,
AudioSpectrogram,
AvgPoolFusion,
BatchNorm,
BatchNormGrad,
BatchToSpace,
BatchToSpaceND,
BiasAdd,
BinaryCrossEntropy,
BinaryCrossEntropyGrad,
BiasGrad,
BroadcastTo,
Cast,
Ceil,
Clip,
Concat,
ControlDepend,
Conv2DBackpropFilterFusion,
Conv2DBackpropInputFusion,
Conv2DFusion,
Conv2dTransposeFusion,
Cos,
ConstantOfShape,
Crop,
CustomExtractFeatures,
CustomNormalize,
CustomPredict,
DeConv2DGradFilter,
Depend,
DepthToSpace,
DetectionPostProcess,
DivFusion,
DivGrad,
Dropout,
DropoutGrad,
Elu,
Eltwise,
Equal,
EmbeddingLookupFusion,
ExpFusion,
ExpandDims,
FakeQuantWithMinMaxVars,
FakeQuantWithMinMaxVarsPerChannel,
FftReal,
FftImag,
Flatten,
FlattenGrad,
Floor,
FloorDiv,
FloorMod,
Fill,
FullConnection,
FusedBatchNorm,
Gather,
GatherNd,
Greater,
GreaterEqual,
HashtableLookup,
InstanceNorm,
LayerNormFusion,
LeakyRelu,
Less,
LessEqual,
Log,
LogGrad,
LogicalAnd,
LogicalNot,
LogicalOr,
LpNormalization,
Lrn,
LshProjection,
LSTM,
L2NormalizeFusion,
MatMul,
Maximum,
MaximumGrad,
MaxPoolFusion,
Merge,
Mfcc,
Minimum,
MinimumGrad,
Mod,
MulFusion,
MulGrad,
Neg,
NegGrad,
NotEqual,
NonMaxSuppression,
OneHot,
OnesLike,
PadFusion,
PartialFusion,
PoolingGrad,
PowerGrad,
PowFusion,
PriorBox,
PReLUFusion,
QuantDTypeCast,
Rank,
Range,
Reciprocal,
RealDiv,
ReduceFusion,
Reshape,
Resize,
ReverseSequence,
ReverseV2,
Rfft,
ROIPooling,
Round,
Rsqrt,
ScaleFusion,
ScatterNd,
SGD,
Shape,
SigmoidCrossEntropyWithLogits,
SigmoidCrossEntropyWithLogitsGrad,
Sin,
SkipGram,
SliceFusion,
SmoothL1Loss,
SmoothL1LossGrad,
Softmax,
SoftmaxCrossEntropyWithLogits,
SpaceToBatch,
SpaceToBatchND,
SpaceToDepth,
SparseSoftmaxCrossEntropy,
SparseToDense,
Split,
Sqrt,
Squeeze,
Square,
SquaredDifference,
Stack,
StridedSlice,
SubFusion,
SubGrad,
Switch,
TensorListFromTensor,
TensorListGetItem,
TensorListReserve,
TensorListSetItem,
TensorListStack,
TileFusion,
TopKFusion,
Transpose,
Unique,
UnsortedSegmentSum,
Unsqueeze,
Unstack,
While,
Where,
ZerosLike,
}

@@ -26,40 +26,6 @@
namespace mindspore::lite {
using mindspore::lite::ops::SchemaRegisterImpl;
int GenPrimitiveTypeFbs(std::string path) {
if (access((path).c_str(), F_OK) == 0) {
chmod((path).c_str(), S_IWUSR);
}
std::ofstream output(path, std::ofstream::binary);
if (!output.is_open()) {
MS_LOG(ERROR) << "Can not open file: " << path;
return RET_ERROR;
}
std::string ns =
"/**\n *\n * Copyright 2021 Huawei Technologies Co., Ltd\n"
" * Licensed under the Apache License, Version 2.0 (the \"License\");\n"
" * you may not use this file except in compliance with the License.\n"
" * You may obtain a copy of the License at\n"
" *\n"
" * http://www.apache.org/licenses/LICENSE-2.0\n"
" *\n"
" * Unless required by applicable law or agreed to in writing, software\n"
" * distributed under the License is distributed on an \"AS IS\" BASIS,\n"
" * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n"
" * See the License for the specific language governing permissions and\n"
" * limitations under the License.\n"
" */\n"
"include \"ops.fbs\";\n\nnamespace mindspore.schema;\n\n";
output.write(ns.c_str(), ns.length());
SchemaRegisterImpl *instance = SchemaRegisterImpl::Instance();
std::string prim_type = instance->GetPrimTypeGenFunc()();
output.write(prim_type.c_str(), prim_type.length());
output.close();
chmod(path.c_str(), S_IRUSR);
return RET_OK;
}
int SchemaGen::Init() {
if (this->flags_ == nullptr) {
return RET_ERROR;
@@ -71,9 +37,7 @@ int SchemaGen::Init() {
MS_LOG(ERROR) << "get instance fail!";
return RET_ERROR;
}
if (GenPrimitiveTypeFbs(flags_->export_path_ + "/primitive_type.fbs") != RET_OK) {
return RET_ERROR;
}
std::string path = flags_->export_path_ + "/ops.fbs";
if (access((path).c_str(), F_OK) == 0) {
chmod((path).c_str(), S_IWUSR);
@@ -99,6 +63,8 @@ int SchemaGen::Init() {
" */\n"
"include \"ops_types.fbs\";\n\nnamespace mindspore.schema;\n\n";
output.write(ns.c_str(), ns.length());
std::string prim_type = instance->GetPrimTypeGenFunc()();
output.write(prim_type.c_str(), prim_type.length());
for (auto &&func : instance->GetAllOpDefCreateFuncs()) {
std::string &&str = func();
output.write(str.c_str(), str.length());
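After this change SchemaGen::Init emits everything into one file: the shared header text (license, include, namespace), then the PrimitiveType union, then every registered op table. A condensed, runnable sketch of that write order; the two stand-in callbacks below are hypothetical placeholders for SchemaRegisterImpl's GetPrimTypeGenFunc and GetAllOpDefCreateFuncs, not the real registry:

#include <fstream>
#include <functional>
#include <string>
#include <vector>

// Hypothetical stand-ins for the registry callbacks used by SchemaGen::Init.
static std::string GenPrimType() { return "union PrimitiveType {\n  Abs,\n}\n\n"; }
static std::vector<std::function<std::string()>> OpDefCreators() {
  return {[]() { return std::string("table Abs {\n}\n\n"); }};
}

// Write the shared header text, then the union, then each op table, into one .fbs.
static int WriteOpsFbs(const std::string &path, const std::string &header) {
  std::ofstream output(path, std::ofstream::binary);
  if (!output.is_open()) {
    return -1;
  }
  output.write(header.c_str(), header.length());
  std::string prim_type = GenPrimType();
  output.write(prim_type.c_str(), prim_type.length());
  for (auto &&func : OpDefCreators()) {
    std::string &&str = func();
    output.write(str.c_str(), str.length());
  }
  return 0;
}

int main() {
  const std::string header = "include \"ops_types.fbs\";\n\nnamespace mindspore.schema;\n\n";
  return WriteOpsFbs("ops.fbs", header);
}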