repair distributed training for vgg in D

This commit is contained in:
ms_yan 2020-07-31 12:27:43 +08:00
parent 6eddd65cf1
commit 4cf23db838
2 changed files with 5 additions and 4 deletions

View File

@ -47,6 +47,6 @@ do
cd ./train_parallel$i || exit
echo "start training for rank $RANK_ID, device $DEVICE_ID"
env > env.log
python train.py --data_path=$2 --device_target="Ascend" --device_id=$i &> log &
python train.py --data_path=$2 --device_target="Ascend" --device_id=$i --is_distributed=1 &> log &
cd ..
done

View File

@ -191,12 +191,13 @@ if __name__ == '__main__':
if args.is_distributed:
if args.device_target == "Ascend":
init()
context.set_context(device_id=args.device_id)
elif args.device_target == "GPU":
init("nccl")
args.rank = get_rank()
args.group_size = get_group_size()
device_num = args.group_size
args.rank = get_rank()
args.group_size = get_group_size()
device_num = args.group_size
context.reset_auto_parallel_context()
context.set_auto_parallel_context(device_num=device_num, parallel_mode=ParallelMode.DATA_PARALLEL,
mirror_mean=True)