!9264 export script for resnet50_quant and densenet121, fix centerface and transformer bug

From: @yuzhenhua666
Reviewed-by: @linqingke,@yingjy
Signed-off-by: @linqingke
mindspore-ci-bot 2020-12-01 17:42:02 +08:00 committed by Gitee
commit 510ed65300
4 changed files with 124 additions and 7 deletions


@@ -20,14 +20,14 @@ import mindspore
from mindspore import context, Tensor
from mindspore.train.serialization import load_checkpoint, load_param_into_net, export
-from src.centerface import CenterfaceMobilev2
+from src.centerface import CenterfaceMobilev2, CenterFaceWithNms
from src.config import ConfigCenterface
parser = argparse.ArgumentParser(description='centerface export')
parser.add_argument("--device_id", type=int, default=0, help="Device id")
parser.add_argument("--batch_size", type=int, default=1, help="batch size")
parser.add_argument("--ckpt_file", type=str, required=True, help="Checkpoint file path.")
parser.add_argument("--file_name", type=str, default="centerface.air", help="output file name.")
parser.add_argument("--file_name", type=str, default="centerface", help="output file name.")
parser.add_argument('--file_format', type=str, choices=["AIR", "ONNX", "MINDIR"], default='AIR', help='file format')
args = parser.parse_args()
@@ -48,6 +48,7 @@ if __name__ == '__main__':
param_dict_new[key] = values
load_param_into_net(net, param_dict_new)
+net = CenterFaceWithNms(net)
net.set_train(False)
input_data = Tensor(np.zeros([args.batch_size, 3, config.input_h, config.input_w]), mindspore.float32)
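Note on the fix: wrapping the backbone in CenterFaceWithNms before set_train(False) and export means the decode/NMS post-processing is traced into the exported graph rather than left to the inference client. A minimal, hedged sketch of that wrapping pattern (the toy cell below is illustrative only and is not the repo's CenterFaceWithNms; P.Sigmoid stands in for the real decode step):

import mindspore.nn as nn
from mindspore.ops import operations as P

class BackboneWithDecode(nn.Cell):
    """Toy wrapper: runs the backbone, then a fixed post-processing op."""
    def __init__(self, backbone):
        super(BackboneWithDecode, self).__init__()
        self.backbone = backbone
        self.decode = P.Sigmoid()  # stand-in for the real NMS/decode logic

    def construct(self, x):
        return self.decode(self.backbone(x))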


@@ -0,0 +1,57 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import argparse
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import context, Tensor
from mindspore.train.serialization import export, load_checkpoint, load_param_into_net
from src.network import DenseNet121
from src.config import config
parser = argparse.ArgumentParser(description="densenet121 export")
parser.add_argument("--device_id", type=int, default=0, help="Device id")
parser.add_argument("--batch_size", type=int, default=32, help="batch size")
parser.add_argument("--ckpt_file", type=str, required=True, help="Checkpoint file path.")
parser.add_argument("--file_name", type=str, default="densenet121", help="output file name.")
parser.add_argument("--file_format", type=str, choices=["AIR", "ONNX", "MINDIR"], default="AIR", help="file format")
args = parser.parse_args()
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", device_id=args.device_id)
if __name__ == "__main__":
    network = DenseNet121(config.num_classes)
    param_dict = load_checkpoint(args.ckpt_file)
    param_dict_new = {}
    for key, value in param_dict.items():
        if key.startswith("moments."):
            continue
        elif key.startswith("network."):
            param_dict_new[key[8:]] = value
        else:
            param_dict_new[key] = value
    load_param_into_net(network, param_dict_new)
    network.add_flags_recursive(fp16=True)
    network.set_train(False)
    shape = [int(args.batch_size), 3] + [int(config.image_size.split(",")[0]), int(config.image_size.split(",")[1])]
    input_data = Tensor(np.zeros(shape), mstype.float32)
    export(network, input_data, file_name=args.file_name, file_format=args.file_format)
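The loop above remaps checkpoint keys so they fit the bare DenseNet121 cell: optimizer state under "moments." is dropped and the training wrapper's "network." prefix (8 characters, hence key[8:]) is stripped. A framework-free sketch of the same remapping, with hypothetical checkpoint keys:

ckpt = {
    "moments.conv1.weight": "m",   # optimizer moment -> skipped
    "network.conv1.weight": "w",   # wrapped backbone weight -> prefix stripped
    "learning_rate": "lr",         # anything else -> kept unchanged
}
remapped = {}
for key, value in ckpt.items():
    if key.startswith("moments."):
        continue
    elif key.startswith("network."):
        remapped[key[len("network."):]] = value
    else:
        remapped[key] = value
assert remapped == {"conv1.weight": "w", "learning_rate": "lr"}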


@@ -0,0 +1,51 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import argparse
import numpy as np
from mindspore import context, Tensor
from mindspore.train.serialization import export, load_checkpoint, load_param_into_net
from mindspore.compression.quant import QuantizationAwareTraining
from src.config import config_quant
from models.resnet_quant_manual import resnet50_quant
parser = argparse.ArgumentParser(description='resnet50_quant export')
parser.add_argument("--device_id", type=int, default=0, help="Device id")
parser.add_argument("--batch_size", type=int, default=1, help="batch size")
parser.add_argument("--img_size", type=int, default=224, help="image size")
parser.add_argument("--ckpt_file", type=str, required=True, help="Checkpoint file path.")
parser.add_argument("--file_name", type=str, default="resnet50_quant", help="output file name.")
parser.add_argument('--file_format', type=str, choices=["AIR", "ONNX", "MINDIR"], default='MINDIR', help='file format')
args = parser.parse_args()
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", device_id=args.device_id)
if __name__ == "__main__":
    config = config_quant
    network = resnet50_quant(class_num=config.class_num)
    quantizer = QuantizationAwareTraining(bn_fold=True, per_channel=[True, False], symmetric=[True, False])
    network = quantizer.quantize(network)
    param_dict = load_checkpoint(args.ckpt_file)
    load_param_into_net(network, param_dict)
    network.set_train(False)
    shape = [config.batch_size, 3] + [args.img_size, args.img_size]
    input_data = Tensor(np.zeros(shape).astype(np.float32))
    export(network, input_data, file_name=args.file_name, file_format=args.file_format)
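Because the checkpoint comes from quantization-aware training, the network is converted with quantizer.quantize() before load_param_into_net so the fake-quant parameter names line up with the checkpoint. A hedged smoke-test sketch for the exported model, not part of this commit, assuming a MindSpore build that provides mindspore.load() and nn.GraphCell for MINDIR files; the file name, batch size and image size below are assumptions based on the script's defaults:

import numpy as np
import mindspore as ms
import mindspore.nn as nn

graph = ms.load("resnet50_quant.mindir")                    # assumed: default --file_name plus MINDIR suffix
net = nn.GraphCell(graph)                                   # wrap the loaded graph as a callable cell
dummy = ms.Tensor(np.zeros([1, 3, 224, 224], np.float32))   # must match the shape used at export time
print(net(dummy).shape)                                     # expect (batch, class_num) logits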


@@ -12,8 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""export checkpoint file into air models"""
""" export checkpoint file into models"""
import argparse
import numpy as np
from mindspore import Tensor, context
@@ -23,7 +24,14 @@ from src.transformer_model import TransformerModel
from src.eval_config import cfg, transformer_net_cfg
from eval import load_weights
-context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
+parser = argparse.ArgumentParser(description='transformer export')
+parser.add_argument("--device_id", type=int, default=0, help="Device id")
+parser.add_argument("--batch_size", type=int, default=1, help="batch size")
+parser.add_argument("--file_name", type=str, default="transformer", help="output file name.")
+parser.add_argument('--file_format', type=str, choices=["AIR", "ONNX", "MINDIR"], default='AIR', help='file format')
+args = parser.parse_args()
+context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", device_id=args.device_id)
if __name__ == '__main__':
tfm_model = TransformerModel(config=transformer_net_cfg, is_training=False, use_one_hot_embeddings=False)
@@ -31,9 +39,9 @@ if __name__ == '__main__':
parameter_dict = load_weights(cfg.model_file)
load_param_into_net(tfm_model, parameter_dict)
-source_ids = Tensor(np.ones((1, 128)).astype(np.int32))
-source_mask = Tensor(np.ones((1, 128)).astype(np.int32))
+source_ids = Tensor(np.ones((args.batch_size, transformer_net_cfg.seq_length)).astype(np.int32))
+source_mask = Tensor(np.ones((args.batch_size, transformer_net_cfg.seq_length)).astype(np.int32))
dec_len = transformer_net_cfg.max_decode_length
-export(tfm_model, source_ids, source_mask, file_name="len" + str(dec_len) + ".air", file_format="AIR")
+export(tfm_model, source_ids, source_mask, file_name=args.file_name + str(dec_len), file_format=args.file_format)
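With the new arguments, the hard-coded "len" + dec_len + ".air" output becomes a user-chosen base name plus the decode length, and the format follows --file_format. A framework-free sketch of the resulting naming, using hypothetical values (max_decode_length really comes from transformer_net_cfg, and the format-specific suffix is assumed to be appended by export()):

def exported_file(file_name, dec_len, file_format):
    # Hypothetical helper for illustration only; mirrors file_name + str(dec_len) above.
    suffix = {"AIR": ".air", "ONNX": ".onnx", "MINDIR": ".mindir"}[file_format]
    return file_name + str(dec_len) + suffix

assert exported_file("transformer", 80, "AIR") == "transformer80.air"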