add export for mobilenetv2, mobilenetv3, resnext50

zhaoting 2020-10-22 11:31:20 +08:00
parent 84f70c8cf0
commit 318cd392d7
11 changed files with 214 additions and 27 deletions

View File

@@ -10,6 +10,7 @@
- [Script and Sample Code](#script-and-sample-code)
- [Training Process](#training-process)
- [Evaluation Process](#eval-process)
- [Export MindIR](#export-mindir)
- [Model Description](#model-description)
- [Performance](#performance)
- [Training Performance](#training-performance)
@@ -62,21 +63,22 @@ For FP16 operators, if the input data type is FP32, the backend of MindSpore wil
```python
├── MobileNetV2
  ├── README.md                # descriptions about MobileNetV2
  ├── scripts
  │   ├──run_train.sh          # shell script for train, fine_tune or incremental learn with CPU, GPU or Ascend
  │   ├──run_eval.sh           # shell script for evaluation with CPU, GPU or Ascend
  ├── src
  │   ├──args.py               # parse args
  │   ├──config.py             # parameter configuration
  │   ├──dataset.py            # creating dataset
  │   ├──lr_generator.py       # learning rate config
  │   ├──mobilenetV2.py        # MobileNetV2 architecture
  │   ├──models.py             # contain define_net and Loss, Monitor
  │   ├──utils.py              # utils to load ckpt_file for fine tune or incremental learn
  ├── train.py                 # training script
  ├── eval.py                  # evaluation script
  ├── export.py                # export mindir script
  ├── mindspore_hub_conf.py    # mindspore hub interface
```
## [Training process](#contents)
@@ -172,6 +174,14 @@ Inference result will be stored in the example path, you can find result like th
result: {'acc': 0.71976314102564111} ckpt=./ckpt_0/mobilenet-200_625.ckpt
```
## [Export MindIR](#contents)
Set the export format and output file name (`export_format` and `export_file`) in `src/config.py`, then run `export.py`:
```
python export.py --platform [PLATFORM] --pretrain_ckpt [CKPT_PATH]
```
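For example, to export the checkpoint from the evaluation example above on Ascend (the checkpoint path is illustrative, replace it with your own):
```
python export.py --platform Ascend --pretrain_ckpt ./ckpt_0/mobilenet-200_625.ckpt
```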
# [Model description](#contents)
## [Performance](#contents)

View File

@@ -0,0 +1,35 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
mobilenetv2 export mindir.
"""
import numpy as np
from mindspore import Tensor
from mindspore.train.serialization import export
from src.config import set_config
from src.args import export_parse_args
from src.models import define_net, load_ckpt
from src.utils import set_context

if __name__ == '__main__':
    args_opt = export_parse_args()
    cfg = set_config(args_opt)
    set_context(cfg)

    # build the network and load the pretrained checkpoint
    _, _, net = define_net(cfg, args_opt.is_training)
    load_ckpt(net, args_opt.pretrain_ckpt)

    # trace the graph with a fixed 1 x 3 x H x W dummy input, then export it
    # to cfg.export_file in cfg.export_format (MINDIR by default)
    input_shp = [1, 3, cfg.image_height, cfg.image_width]
    input_array = Tensor(np.random.uniform(-1.0, 1.0, size=input_shp).astype(np.float32))
    export(net, input_array, file_name=cfg.export_file, file_format=cfg.export_format)

View File

@@ -31,14 +31,14 @@ run_ascend()
    BASEPATH=$(cd "`dirname $0`" || exit; pwd)
    export PYTHONPATH=${BASEPATH}:$PYTHONPATH
    export RANK_TABLE_FILE=$4
    # RANK_SIZE is exported for MindSpore and reused below as the device loop bound
    export RANK_SIZE=$2
    if [ -d "../train" ];
    then
        rm -rf ../train
    fi
    mkdir ../train
    cd ../train || exit
    for((i=0; i<${RANK_SIZE}; i++))
    do
        export DEVICE_ID=$i
        export RANK_ID=$i

View File

@@ -61,4 +61,14 @@ def eval_parse_args():
    eval_args = eval_parser.parse_args()
    eval_args.is_training = False
    return eval_args

def export_parse_args():
    export_parser = argparse.ArgumentParser(description='Image classification export')
    export_parser.add_argument('--platform', type=str, default="Ascend", choices=("Ascend", "GPU", "CPU"), \
        help='run platform, only supports GPU, CPU and Ascend')
    export_parser.add_argument('--pretrain_ckpt', type=str, required=True, help='Pretrained checkpoint path \
        for fine tune or incremental learning')
    export_args = export_parser.parse_args()
    # export is neither training nor distributed
    export_args.is_training = False
    export_args.run_distribute = False
    return export_args

View File

@@ -19,6 +19,8 @@ import os
from easydict import EasyDict as ed

def set_config(args):
    if not args.run_distribute:
        args.run_distribute = False
    config_cpu = ed({
        "num_classes": 26,
        "image_height": 224,
@@ -38,8 +40,9 @@ def set_config(args):
        "keep_checkpoint_max": 20,
        "save_checkpoint_path": "./",
        "platform": args.platform,
        "activation": "Softmax",
        "export_format": "MINDIR",
        "export_file": "mobilenetv2.mindir"
    })
    config_gpu = ed({
        "num_classes": 1000,
@@ -62,7 +65,9 @@ def set_config(args):
        "platform": args.platform,
        "ccl": "nccl",
        "run_distribute": args.run_distribute,
        "activation": "Softmax",
        "export_format": "MINDIR",
        "export_file": "mobilenetv2.mindir"
    })
    config_ascend = ed({
        "num_classes": 1000,
@@ -88,7 +93,9 @@ def set_config(args):
        "rank_id": int(os.getenv('RANK_ID', '0')),
        "rank_size": int(os.getenv('RANK_SIZE', '1')),
        "run_distribute": int(os.getenv('RANK_SIZE', '1')) > 1.,
        "activation": "Softmax",
        "export_format": "MINDIR",
        "export_file": "mobilenetv2.mindir"
    })
    config = ed({"CPU": config_cpu,
                 "GPU": config_gpu,

View File

@@ -9,6 +9,7 @@
- [Training Process](#training-process)
- [Evaluation Process](#evaluation-process)
- [Evaluation](#evaluation)
- [Export MindIR](#export-mindir)
- [Model Description](#model-description)
- [Performance](#performance)
- [Training Performance](#evaluation-performance)
@@ -57,18 +58,19 @@ Dataset used: [imagenet](http://www.image-net.org/)
```python
├── MobileNetV3
  ├── Readme.md                # descriptions about MobileNetV3
  ├── scripts
  │   ├──run_train.sh          # shell script for train
  │   ├──run_eval.sh           # shell script for evaluation
  ├── src
  │   ├──config.py             # parameter configuration
  │   ├──dataset.py            # creating dataset
  │   ├──lr_generator.py       # learning rate config
  │   ├──mobilenetV3.py        # MobileNetV3 architecture
  ├── train.py                 # training script
  ├── eval.py                  # evaluation script
  ├── export.py                # export mindir script
  ├── mindspore_hub_conf.py    # mindspore hub interface
```
## [Training process](#contents)
@@ -129,6 +131,14 @@ Inference result will be stored in the example path, you can find result like th
result: {'acc': 0.71976314102564111} ckpt=/path/to/checkpoint/mobilenet-200_625.ckpt
```
## [Export MindIR](#contents)
Set the export format and output file name (`export_format` and `export_file`) in `src/config.py`, then run `export.py` (only the GPU configuration is currently supported):
```
python export.py --device_target [PLATFORM] --checkpoint_path [CKPT_PATH]
```
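For example, with the checkpoint from the evaluation step (the path is illustrative; `export.py` currently accepts only `--device_target GPU`):
```
python export.py --device_target GPU --checkpoint_path /path/to/checkpoint/mobilenet-200_625.ckpt
```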
# [Model description](#contents)
## [Performance](#contents)

View File

@@ -0,0 +1,45 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
mobilenetv3 export mindir.
"""
import argparse
import numpy as np
from mindspore import context, Tensor
from mindspore.train.serialization import load_checkpoint, load_param_into_net, export
from src.config import config_gpu
from src.mobilenetV3 import mobilenet_v3_large

parser = argparse.ArgumentParser(description='Image classification')
parser.add_argument('--checkpoint_path', type=str, required=True, help='Checkpoint file path')
parser.add_argument('--device_target', type=str, default="GPU", help='run device target, only GPU is supported')
args_opt = parser.parse_args()

if __name__ == '__main__':
    # only the GPU configuration carries the export settings, so other targets are rejected
    cfg = None
    if args_opt.device_target == "GPU":
        cfg = config_gpu
        context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    else:
        raise ValueError("Unsupported device_target.")

    # build the network and load the checkpoint into it
    net = mobilenet_v3_large(num_classes=cfg.num_classes, activation="Softmax")
    param_dict = load_checkpoint(args_opt.checkpoint_path)
    load_param_into_net(net, param_dict)

    # trace the graph with a fixed 1 x 3 x H x W dummy input, then export it
    # to cfg.export_file in cfg.export_format (MINDIR by default)
    input_shp = [1, 3, cfg.image_height, cfg.image_width]
    input_array = Tensor(np.random.uniform(-1.0, 1.0, size=input_shp).astype(np.float32))
    export(net, input_array, file_name=cfg.export_file, file_format=cfg.export_format)

View File

@@ -33,4 +33,6 @@ config_gpu = ed({
    "save_checkpoint_epochs": 1,
    "keep_checkpoint_max": 500,
    "save_checkpoint_path": "./checkpoint",
    "export_format": "MINDIR",
    "export_file": "mobilenetv3.mindir"
})

View File

@@ -12,6 +12,7 @@
- [Script Parameters](#script-parameters)
- [Training Process](#training-process)
- [Evaluation Process](#evaluation-process)
- [Export MindIR](#export-mindir)
- [Model Description](#model-description)
- [Performance](#performance)
- [Training Performance](#evaluation-performance)
@@ -97,6 +98,7 @@ For FP16 operators, if the input data type is FP32, the backend of MindSpore wil
├─warmup_step_lr.py # warmup step learning rate
├─eval.py # eval net
├──train.py # train net
├──export.py # export mindir script
├──mindspore_hub_conf.py # mindspore hub interface
```
@@ -206,6 +208,14 @@ acc=78.16%(TOP1)
acc=93.88%(TOP5)
```
## [Export MindIR](#contents)
Set the export format and output file name (`export_format` and `export_file`) in `src/config.py`, then run `export.py`:
```
python export.py --platform PLATFORM --pretrained CKPT_PATH
```
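For example (the checkpoint path is illustrative, point it at your own file):
```
python export.py --platform Ascend --pretrained /path/to/resnext50.ckpt
```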
# [Model description](#contents)
## [Performance](#contents)

View File

@@ -0,0 +1,56 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
resnext export mindir.
"""
import argparse
import numpy as np
from mindspore import context, Tensor
from mindspore.train.serialization import load_checkpoint, load_param_into_net, export
from src.config import config
from src.image_classification import get_network

def parse_args():
    """parse_args"""
    parser = argparse.ArgumentParser('mindspore classification export')
    parser.add_argument('--platform', type=str, default='Ascend', choices=('Ascend', 'GPU'), help='run platform')
    parser.add_argument('--pretrained', type=str, required=True, help='full path of the pretrained checkpoint to load')
    args, _ = parser.parse_known_args()

    # pull the network and export settings out of the shared config
    args.num_classes = config.num_classes
    args.backbone = config.backbone
    args.image_size = list(map(int, config.image_size.split(',')))
    args.image_height = args.image_size[0]
    args.image_width = args.image_size[1]
    args.export_format = config.export_format
    args.export_file = config.export_file
    return args

if __name__ == '__main__':
    args_export = parse_args()
    context.set_context(mode=context.GRAPH_MODE, device_target=args_export.platform)

    # build the network and load the checkpoint into it
    net = get_network(args_export.backbone, num_classes=args_export.num_classes, platform=args_export.platform)
    param_dict = load_checkpoint(args_export.pretrained)
    load_param_into_net(net, param_dict)

    # trace the graph with a fixed 1 x 3 x H x W dummy input, then export it
    # to config.export_file in config.export_format (MINDIR by default)
    input_shp = [1, 3, args_export.image_height, args_export.image_width]
    input_array = Tensor(np.random.uniform(-1.0, 1.0, size=input_shp).astype(np.float32))
    export(net, input_array, file_name=args_export.export_file, file_format=args_export.export_format)

View File

@@ -42,5 +42,7 @@ config = ed({
    "is_save_on_master": 1,
    "rank": 0,
    "group_size": 1,
    "export_format": "MINDIR",
    "export_file": "resnext50.mindir"
})