forked from mindspore-Ecosystem/mindspore

!16883 alexnet_lenet update

From: @huchunmei
Reviewed-by: @oacjiewen, @c_34
Signed-off-by: @c_34

Commit: e34fa42980
@@ -84,9 +84,15 @@ sh run_standalone_eval_ascend.sh [DATA_PATH] [CKPT_NAME]
     ├── src
     │   ├──dataset.py              // creating dataset
     │   ├──alexnet.py              // alexnet architecture
-    │   ├──config.py               // parameter configuration
+    │   └──model_utils
+    │       ├──config.py           // Processing configuration parameters
+    │       ├──device_adapter.py   // Get cloud ID
+    │       ├──local_adapter.py    // Get local ID
+    │       └──moxing_adapter.py   // Parameter processing
+    ├── default_config.yaml        // Training parameter profile (cifar10)
+    ├── config_imagenet.yaml       // Training parameter profile (imagenet)
     ├── train.py                   // training script
     ├── eval.py                    // evaluation script
 ```

 ### [Script Parameters](#contents)
@@ -88,7 +88,13 @@ sh run_standalone_eval_ascend.sh [DATA_PATH] [CKPT_NAME]
     ├── src
     │   ├──dataset.py              // creating dataset
     │   ├──alexnet.py              // AlexNet architecture
-    │   ├──config.py               // parameter configuration
+    │   └──model_utils
+    │       ├──config.py           // training configuration
+    │       ├──device_adapter.py   // get cloud ID
+    │       ├──local_adapter.py    // get local ID
+    │       └──moxing_adapter.py   // parameter processing
+    ├── default_config.yaml        // training parameter configuration file
+    ├── config_imagenet.yaml       // training parameter configuration file
     ├── train.py                   // training script
     ├── eval.py                    // evaluation script
 ```
@@ -53,7 +53,7 @@ def eval_alexnet():
         network = AlexNet(config.num_classes, phase='test')
         loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")
         opt = nn.Momentum(network.trainable_params(), config.learning_rate, config.momentum)
-        ds_eval = create_dataset_cifar10(config.data_path, config.batch_size, status="test", \
+        ds_eval = create_dataset_cifar10(config, config.data_path, config.batch_size, status="test", \
                                          target=config.device_target)
         param_dict = load_checkpoint(config.ckpt_path)
         print("load checkpoint from [{}].".format(config.ckpt_path))
@@ -64,7 +64,7 @@ def eval_alexnet():
     elif config.dataset_name == 'imagenet':
         network = AlexNet(config.num_classes, phase='test')
         loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction="mean")
-        ds_eval = create_dataset_imagenet(config.data_path, config.batch_size, training=False)
+        ds_eval = create_dataset_imagenet(config, config.data_path, config.batch_size, training=False)
         param_dict = load_checkpoint(config.ckpt_path)
         print("load checkpoint from [{}].".format(config.ckpt_path))
         load_param_into_net(network, param_dict)
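Taken together, the two eval hunks show the core of this refactor: the YAML-backed `config` object is now passed explicitly into the dataset helpers instead of being re-imported inside `src/dataset.py`. A minimal sketch of the resulting eval-side call pattern, using only names that appear in the diff:

```python
# Sketch of the updated eval-side call pattern; `config` is the object exposed
# by src.model_utils.config (backed by default_config.yaml / config_imagenet.yaml).
from src.model_utils.config import config
from src.dataset import create_dataset_cifar10, create_dataset_imagenet

if config.dataset_name == "cifar10":
    ds_eval = create_dataset_cifar10(config, config.data_path, config.batch_size,
                                     status="test", target=config.device_target)
elif config.dataset_name == "imagenet":
    ds_eval = create_dataset_imagenet(config, config.data_path, config.batch_size,
                                      training=False)
```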
@@ -1,54 +0,0 @@
-# Copyright 2020 Huawei Technologies Co., Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============================================================================
-"""
-network config setting, will be used in train.py
-"""
-
-from easydict import EasyDict as edict
-
-alexnet_cifar10_cfg = edict({
-    'num_classes': 10,
-    'learning_rate': 0.002,
-    'momentum': 0.9,
-    'epoch_size': 30,
-    'batch_size': 32,
-    'buffer_size': 1000,
-    'image_height': 227,
-    'image_width': 227,
-    'save_checkpoint_steps': 1562,
-    'keep_checkpoint_max': 10,
-    'air_name': "alexnet.air",
-})
-
-alexnet_imagenet_cfg = edict({
-    'num_classes': 1000,
-    'learning_rate': 0.13,
-    'momentum': 0.9,
-    'epoch_size': 150,
-    'batch_size': 256,
-    'buffer_size': None,  # invalid parameter
-    'image_height': 224,
-    'image_width': 224,
-    'save_checkpoint_steps': 625,
-    'keep_checkpoint_max': 10,
-    'air_name': "alexnet.air",
-
-    # opt
-    'weight_decay': 0.0001,
-    'loss_scale': 1024,
-
-    # lr
-    'is_dynamic_loss_scale': 0,
-})
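The defaults that lived in this removed `src/config.py` (the `alexnet_cifar10_cfg` / `alexnet_imagenet_cfg` dicts) are expected to come from the new YAML profiles instead. A minimal sketch of how the rest of the code now reads them, assuming the shared `model_utils` config helper that other hunks in this commit import:

```python
# Hypothetical usage sketch: the same keys (num_classes, learning_rate,
# batch_size, image_height, ...) are now resolved from default_config.yaml
# or config_imagenet.yaml by the shared model_utils config helper.
from src.model_utils.config import config

print(config.num_classes, config.learning_rate, config.batch_size)
print(config.image_height, config.image_width)
```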
@@ -22,10 +22,9 @@ import mindspore.dataset.transforms.c_transforms as C
 import mindspore.dataset.vision.c_transforms as CV
 from mindspore.common import dtype as mstype
 from mindspore.communication.management import get_rank, get_group_size
-from .config import alexnet_cifar10_cfg, alexnet_imagenet_cfg


-def create_dataset_cifar10(data_path, batch_size=32, repeat_size=1, status="train", target="Ascend"):
+def create_dataset_cifar10(cfg, data_path, batch_size=32, repeat_size=1, status="train", target="Ascend"):
     """
     create dataset for train or test
     """
@@ -40,7 +39,7 @@ def create_dataset_cifar10(data_path, batch_size=32, repeat_size=1, status="trai
                                 num_shards=device_num, shard_id=rank_id)
     rescale = 1.0 / 255.0
     shift = 0.0
-    cfg = alexnet_cifar10_cfg
+    # cfg = alexnet_cifar10_cfg

     resize_op = CV.Resize((cfg.image_height, cfg.image_width))
     rescale_op = CV.Rescale(rescale, shift)
@@ -65,7 +64,7 @@ def create_dataset_cifar10(data_path, batch_size=32, repeat_size=1, status="trai
     return cifar_ds


-def create_dataset_imagenet(dataset_path, batch_size=32, repeat_num=1, training=True,
+def create_dataset_imagenet(cfg, dataset_path, batch_size=32, repeat_num=1, training=True,
                             num_parallel_workers=None, shuffle=None, sampler=None, class_indexing=None):
     """
     create a train or eval imagenet2012 dataset for resnet50
|
@ -82,7 +81,7 @@ def create_dataset_imagenet(dataset_path, batch_size=32, repeat_num=1, training=
|
||||||
"""
|
"""
|
||||||
|
|
||||||
device_num, rank_id = _get_rank_info()
|
device_num, rank_id = _get_rank_info()
|
||||||
cfg = alexnet_imagenet_cfg
|
# cfg = alexnet_imagenet_cfg
|
||||||
|
|
||||||
num_parallel_workers = 16
|
num_parallel_workers = 16
|
||||||
if device_num == 1:
|
if device_num == 1:
|
||||||
|
|
|
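On the callee side, the dataset helpers now take the config object as their first parameter, and the module-level `alexnet_*_cfg` lookups are gone (left behind only as commented-out lines). A condensed, simplified sketch of the new shape of `create_dataset_cifar10`, built only from pieces visible in the diff (the real function also handles sharding, augmentation, and repeat):

```python
import mindspore.dataset as ds
import mindspore.dataset.vision.c_transforms as CV

def create_dataset_cifar10(cfg, data_path, batch_size=32, repeat_size=1,
                           status="train", target="Ascend"):
    """Create a CIFAR-10 dataset; image size now comes from the passed-in cfg."""
    cifar_ds = ds.Cifar10Dataset(data_path)
    resize_op = CV.Resize((cfg.image_height, cfg.image_width))  # values from the YAML config
    rescale_op = CV.Rescale(1.0 / 255.0, 0.0)
    cifar_ds = cifar_ds.map(operations=[resize_op, rescale_op], input_columns="image")
    return cifar_ds.batch(batch_size, drop_remainder=True)
```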
@@ -82,9 +82,9 @@ def train_alexnet():
         context.set_context(device_id=get_device_id())

     if config.dataset_name == "cifar10":
-        ds_train = create_dataset_cifar10(config.data_path, config.batch_size, target=config.device_target)
+        ds_train = create_dataset_cifar10(config, config.data_path, config.batch_size, target=config.device_target)
     elif config.dataset_name == "imagenet":
-        ds_train = create_dataset_imagenet(config.data_path, config.batch_size)
+        ds_train = create_dataset_imagenet(config, config.data_path, config.batch_size)
     else:
         raise ValueError("Unsupported dataset.")

@@ -100,7 +100,12 @@ sh run_standalone_eval_ascend.sh [DATA_PATH] [CKPT_NAME]
     │   ├──aipp.cfg              // aipp config
     │   ├──dataset.py            // creating dataset
     │   ├──lenet.py              // lenet architecture
-    │   ├──config.py             // parameter configuration
+    │   └──model_utils
+    │       ├──config.py         // Processing configuration parameters
+    │       ├──device_adapter.py // Get cloud ID
+    │       ├──local_adapter.py  // Get local ID
+    │       └──moxing_adapter.py // Parameter processing
+    ├── default_config.yaml      // Training parameter profile (ascend)
     ├── train.py                 // training script
     ├── eval.py                  // evaluation script
     ├── postprocess.py           // postprocess script
@@ -102,12 +102,17 @@ sh run_standalone_eval_ascend.sh [DATA_PATH] [CKPT_NAME]
     │   ├──run_standalone_eval_ascend.sh   // Ascend evaluation
     ├── src
     │   ├──aipp.cfg              // aipp configuration file
     │   ├──dataset.py            // creating dataset
     │   ├──lenet.py              // LeNet architecture
-    │   ├──config.py             // parameter configuration
+    │   └──model_utils
+    │       ├──config.py         // training configuration
+    │       ├──device_adapter.py // get cloud ID
+    │       ├──local_adapter.py  // get local ID
+    │       └──moxing_adapter.py // parameter processing
+    ├── default_config.yaml      // training parameter configuration file
     ├── train.py                 // training script
     ├── eval.py                  // evaluation script
     ├── postprocess.py           // postprocessing script for 310 inference
 ```

 ## Script Parameters
@@ -18,7 +18,7 @@ num_classes: 10
 lr: 0.01
 momentum: 0.9
 epoch_size: 10
-batch_size: 15 # 32
+batch_size: 32
 buffer_size: 1000
 image_height: 32
 image_width: 32
@@ -27,7 +27,7 @@ keep_checkpoint_max: 10
 air_name: "lenet"
 device_id: 0
 file_name: "lenet"
-file_format: "AIR"
+file_format: "MINDIR"

 model_name: lenet
 learning_rate: 0.002
@@ -37,6 +37,10 @@ dataset_sink_mode: True
 save_checkpoint: True
 save_checkpoint_epochs: 2

+# lenet acc calculation
+result_path: ''  # "result files path."
+img_path: ''  # "image file path."
+
 ---
 # Config description for each option
 enable_modelarts: 'Whether training on modelarts, default: False'
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # ============================================================================
-"""export checkpoint file into air, mindir models"""
+"""export checkpoint file into air, onnx, mindir models"""

 from src.model_utils.config import config
 from src.model_utils.device_adapter import get_device_id
@@ -14,14 +14,10 @@
 # ============================================================================
 """post process for 310 inference"""
 import os
-import argparse
 import numpy as np
+from src.model_utils.config import config

 batch_size = 1
-parser = argparse.ArgumentParser(description="lenet acc calculation")
-parser.add_argument("--result_path", type=str, required=True, help="result files path.")
-parser.add_argument("--img_path", type=str, required=True, help="image file path.")
-args = parser.parse_args()


 def calcul_acc(labels, preds):
@@ -44,4 +40,4 @@ def get_result(result_path, img_path):


 if __name__ == '__main__':
-    get_result(args.result_path, args.img_path)
+    get_result(config.result_path, config.img_path)
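With argparse removed, the 310-inference postprocess script is driven by the same YAML-backed config, using the `result_path` and `img_path` keys added to `default_config.yaml` earlier in this commit. A self-contained sketch of the config-driven entry point; the `get_result` body here is a simplified stand-in for the real one, which computes accuracy from the exported result files:

```python
import os
from src.model_utils.config import config  # result_path / img_path now come from default_config.yaml

def get_result(result_path, img_path):
    """Simplified stand-in: report how many result files and images were found."""
    results = os.listdir(result_path)
    images = os.listdir(img_path)
    print("result files: {}, images: {}".format(len(results), len(images)))

if __name__ == '__main__':
    get_result(config.result_path, config.img_path)
```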
@@ -1,33 +0,0 @@
-# Copyright 2020 Huawei Technologies Co., Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============================================================================
-"""
-network config setting, will be used in train.py
-"""
-
-from easydict import EasyDict as edict
-
-mnist_cfg = edict({
-    'num_classes': 10,
-    'lr': 0.01,
-    'momentum': 0.9,
-    'epoch_size': 10,
-    'batch_size': 32,
-    'buffer_size': 1000,
-    'image_height': 32,
-    'image_width': 32,
-    'save_checkpoint_steps': 1875,
-    'keep_checkpoint_max': 10,
-    'air_name': "lenet",
-})