forked from mindspore-Ecosystem/mindspore
del ccl param in config.py
parent 5b9ab437dd
commit 28bdda2bb3

@@ -63,7 +63,6 @@ def set_config(args):
         "keep_checkpoint_max": 200,
         "save_checkpoint_path": "./",
         "platform": args.platform,
-        "ccl": "nccl",
         "run_distribute": args.run_distribute,
         "activation": "Softmax",
         "export_format": "MINDIR",
@@ -88,7 +87,6 @@ def set_config(args):
         "keep_checkpoint_max": 200,
         "save_checkpoint_path": "./",
         "platform": args.platform,
-        "ccl": "hccl",
         "device_id": int(os.getenv('DEVICE_ID', '0')),
         "rank_id": int(os.getenv('RANK_ID', '0')),
         "rank_size": int(os.getenv('RANK_SIZE', '1')),
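With the "ccl" key removed from both platform dicts above, the backend name no longer travels through the config object at all. A hedged sketch of the trimmed GPU dict after this commit, assuming the EasyDict wrapper and the config_gpu name typical of model_zoo config.py files (only the keys and values shown in the hunks come from the diff):

# Hedged sketch: trimmed GPU config after this commit. The EasyDict
# wrapper and the config_gpu name are assumptions; the keys and values
# are taken from the hunk above. The Ascend dict is trimmed the same way.
from easydict import EasyDict as ed

def set_config(args):
    config_gpu = ed({
        "keep_checkpoint_max": 200,
        "save_checkpoint_path": "./",
        "platform": args.platform,
        # "ccl": "nccl" used to live here; the backend is now chosen
        # at init() time instead (see the last hunk below).
        "run_distribute": args.run_distribute,
        "activation": "Softmax",
        "export_format": "MINDIR",
    })
    return config_gpu
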
@@ -38,7 +38,7 @@ def context_device_init(config):
     elif config.platform == "GPU":
         context.set_context(mode=context.GRAPH_MODE, device_target=config.platform, save_graphs=False)
         if config.run_distribute:
-            init("nccl")
+            init()
             context.set_auto_parallel_context(device_num=get_group_size(),
                                               parallel_mode=ParallelMode.DATA_PARALLEL,
                                               gradients_mean=True)
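Dropping the explicit backend argument leans on init()'s default behaviour of inferring the collective library from the configured device target (NCCL on GPU, HCCL on Ascend), which is what makes the "ccl" config key redundant. A minimal sketch of the resulting call pattern, with "GPU" standing in for config.platform:

# Minimal sketch of backend-free distributed init; "GPU" stands in for
# config.platform from context_device_init above.
from mindspore import context
from mindspore.context import ParallelMode
from mindspore.communication.management import init, get_group_size

context.set_context(mode=context.GRAPH_MODE, device_target="GPU", save_graphs=False)
init()  # backend (nccl vs. hccl) is inferred from device_target
context.set_auto_parallel_context(device_num=get_group_size(),
                                  parallel_mode=ParallelMode.DATA_PARALLEL,
                                  gradients_mean=True)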