!18907 Fix acc of resnet34

Merge pull request !18907 from zhouyaqiang0/fix_resnet34
i-robot 2021-06-26 09:24:54 +00:00 committed by Gitee
commit da7df61924
4 changed files with 8 additions and 57 deletions

View File

@@ -35,9 +35,11 @@ args_opt = parser.parse_args()
 set_seed(1)
-if args_opt.net in ("resnet18", "resnet50"):
+if args_opt.net in ("resnet18", "resnet34", "resnet50"):
     if args_opt.net == "resnet18":
         from src.resnet import resnet18 as resnet
+    if args_opt.net == "resnet34":
+        from src.resnet import resnet34 as resnet
     if args_opt.net == "resnet50":
         from src.resnet import resnet50 as resnet
     if args_opt.dataset == "cifar10":
@@ -46,10 +48,6 @@ if args_opt.net in ("resnet18", "resnet50"):
     else:
         from src.config import config2 as config
         from src.dataset import create_dataset2 as create_dataset
-elif args_opt.net == "resnet34":
-    from src.resnet import resnet34 as resnet
-    from src.config import config_resnet34 as config
-    from src.dataset import create_dataset2 as create_dataset
 elif args_opt.net == "resnet101":
     from src.resnet import resnet101 as resnet
     from src.config import config3 as config
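
With this change, resnet34 is resolved through the same branch as resnet18 and resnet50, so on imagenet2012 it picks up the shared config2 and create_dataset2 instead of the dedicated config_resnet34 removed below. A minimal sketch of how the selected aliases are typically consumed further down this kind of evaluation script; the exact lines sit outside the hunk, so treat the argument names and values here as assumptions:

# hypothetical downstream usage of the aliases chosen above
dataset = create_dataset(dataset_path="/path/to/imagenet/val", do_train=False,
                         batch_size=config.batch_size, target="Ascend")
net = resnet(class_num=config.class_num)  # resolves to resnet34 when --net resnet34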

View File

@@ -153,24 +153,3 @@ config_thor_gpu = ed({
     "damping_decay": 0.5467,
     "frequency": 834,
 })
-# config for resnet34, imagenet2012, Ascend
-config_resnet34 = ed({
-    "class_num": 1001,
-    "batch_size": 256,
-    "loss_scale": 1024,
-    "momentum": 0.9,
-    "weight_decay": 1e-4,
-    "epoch_size": 90,
-    "pretrain_epoch_size": 0,
-    "save_checkpoint": True,
-    "save_checkpoint_epochs": 5,
-    "keep_checkpoint_max": 1,
-    "save_checkpoint_path": "./",
-    "warmup_epochs": 0,
-    "optimizer": 'Momentum',
-    "use_label_smooth": True,
-    "label_smooth_factor": 0.1,
-    "lr_init": 0.0,
-    "lr_max": 1.0,
-    "lr_end": 0.0})

View File

@@ -238,20 +238,3 @@ def get_thor_damping(global_step, damping_init, decay_rate, total_epochs, steps_
     damping_each_step = np.array(damping_each_step).astype(np.float32)
     damping_now = damping_each_step[current_step:]
     return damping_now
-def get_resnet34_lr(lr_init, lr_end, lr_max, warmup_epochs, total_epochs, steps_per_epoch):
-    total_steps = steps_per_epoch * total_epochs
-    warmup_steps = steps_per_epoch * warmup_epochs
-    lr_each_step = []
-    for i in range(total_steps):
-        if i < warmup_steps:
-            lr = lr_init + (lr_max - lr_init) * i / warmup_steps
-        else:
-            lr = lr_max - (lr_max - lr_end) * (i - warmup_steps) / (total_steps - warmup_steps)
-        lr_each_step.append(lr)
-    lr_each_step = np.array(lr_each_step).astype(np.float32)
-    return lr_each_step
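
For reference, the deleted helper produced one learning-rate value per training step: a linear warmup from lr_init to lr_max, then a linear decay to lr_end. A self-contained restatement of the same logic (imports added, function renamed to avoid claiming the original identifier; note that with the removed config's warmup_epochs of 0 the warmup branch never executes, so there is no division by zero):

import numpy as np

def linear_warmup_decay_lr(lr_init, lr_end, lr_max, warmup_epochs, total_epochs, steps_per_epoch):
    """Standalone sketch of the removed schedule: one lr value per step."""
    total_steps = steps_per_epoch * total_epochs
    warmup_steps = steps_per_epoch * warmup_epochs
    lr_each_step = []
    for i in range(total_steps):
        if i < warmup_steps:
            # ramp linearly from lr_init toward lr_max during warmup
            lr = lr_init + (lr_max - lr_init) * i / warmup_steps
        else:
            # decay linearly from lr_max to lr_end over the remaining steps
            lr = lr_max - (lr_max - lr_end) * (i - warmup_steps) / (total_steps - warmup_steps)
        lr_each_step.append(lr)
    return np.array(lr_each_step).astype(np.float32)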

View File

@@ -32,7 +32,7 @@ from mindspore.parallel import set_algo_parameters
 import mindspore.nn as nn
 import mindspore.common.initializer as weight_init
 import mindspore.log as logger
-from src.lr_generator import get_lr, warmup_cosine_annealing_lr, get_resnet34_lr
+from src.lr_generator import get_lr, warmup_cosine_annealing_lr
 from src.CrossEntropySmooth import CrossEntropySmooth
 from src.config import cfg
 from src.eval_callback import EvalCallBack
@@ -69,9 +69,11 @@ args_opt = parser.parse_args()
 set_seed(1)
-if args_opt.net in ("resnet18", "resnet50"):
+if args_opt.net in ("resnet18", "resnet34", "resnet50"):
     if args_opt.net == "resnet18":
         from src.resnet import resnet18 as resnet
+    if args_opt.net == "resnet34":
+        from src.resnet import resnet34 as resnet
     if args_opt.net == "resnet50":
         from src.resnet import resnet50 as resnet
     if args_opt.dataset == "cifar10":
@@ -83,10 +85,6 @@ if args_opt.net in ("resnet18", "resnet50"):
             from src.dataset import create_dataset2 as create_dataset
         else:
             from src.dataset import create_dataset_pynative as create_dataset
-elif args_opt.net == "resnet34":
-    from src.resnet import resnet34 as resnet
-    from src.config import config_resnet34 as config
-    from src.dataset import create_dataset2 as create_dataset
 elif args_opt.net == "resnet101":
     from src.resnet import resnet101 as resnet
     from src.config import config3 as config
@@ -199,13 +197,6 @@ if __name__ == '__main__':
         else:
             lr = warmup_cosine_annealing_lr(config.lr, step_size, config.warmup_epochs, config.epoch_size,
                                             config.pretrain_epoch_size * step_size)
-        if args_opt.net == "resnet34":
-            lr = get_resnet34_lr(lr_init=config.lr_init,
-                                 lr_end=config.lr_end,
-                                 lr_max=config.lr_max,
-                                 warmup_epochs=config.warmup_epochs,
-                                 total_epochs=config.epoch_size,
-                                 steps_per_epoch=step_size)
     lr = Tensor(lr)
     # define opt
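
With the resnet34 special case removed, resnet34 now falls through to the shared schedule chosen by the branch above (get_lr or warmup_cosine_annealing_lr). For orientation only, a generic warmup-plus-cosine-annealing schedule has the following shape; this is an illustrative sketch, not the repository's warmup_cosine_annealing_lr, whose body lies outside the hunk:

import math
import numpy as np

def cosine_warmup_lr_sketch(base_lr, steps_per_epoch, warmup_epochs, max_epoch):
    """Generic linear warmup followed by cosine annealing, one lr value per step."""
    total_steps = steps_per_epoch * max_epoch
    warmup_steps = steps_per_epoch * warmup_epochs
    lr_each_step = []
    for i in range(total_steps):
        if i < warmup_steps:
            lr = base_lr * (i + 1) / warmup_steps                      # warm up from ~0 to base_lr
        else:
            progress = (i - warmup_steps) / max(1, total_steps - warmup_steps)
            lr = base_lr * 0.5 * (1 + math.cos(math.pi * progress))    # anneal from base_lr toward 0
        lr_each_step.append(lr)
    return np.array(lr_each_step).astype(np.float32)
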
@@ -233,7 +224,7 @@ if __name__ == '__main__':
     metrics = {"acc"}
     if args_opt.run_distribute:
         metrics = {'acc': DistAccuracy(batch_size=config.batch_size, device_num=args_opt.device_num)}
-    if (args_opt.net not in ("resnet18", "resnet50", "resnet101", "se-resnet50")) or \
+    if (args_opt.net not in ("resnet18", "resnet34", "resnet50", "resnet101", "se-resnet50")) or \
             args_opt.parameter_server or target == "CPU":
         ## fp32 training
         model = Model(net, loss_fn=loss, optimizer=opt, metrics=metrics, eval_network=dist_eval_network)
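
Adding resnet34 to this tuple takes it off the fp32 fallback: unless parameter-server mode or a CPU target forces fp32, resnet34 now trains through the script's other branch, which on Ascend conventionally enables mixed precision. A hedged sketch of that style of model construction using MindSpore's public APIs; the script's actual else branch is outside the hunk, and the names net, loss, opt, metrics, dist_eval_network and config are assumed to come from earlier in the script:

from mindspore.train.model import Model
from mindspore.train.loss_scale_manager import FixedLossScaleManager

# fixed loss scaling plus O2 auto mixed precision, as is typical for ResNet on Ascend
loss_scale = FixedLossScaleManager(config.loss_scale, drop_overflow_update=False)
model = Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale,
              metrics=metrics, amp_level="O2", keep_batchnorm_fp32=False,
              eval_network=dist_eval_network)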