[DDP] Change the --no-optimize-ddp flag to reflect the latest usage (#119437)
Compiled DDP now has 4 different optimization modes. This PR changes the Dynamo benchmark flag to reflect that change.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/119437
Approved by: https://github.com/wconstab, https://github.com/xmfan
commit c0e5cca4f8
parent c2522554dd
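With this change, the benchmark harness passes the flag's value directly into torch._dynamo.config.optimize_ddp, which takes a string mode name instead of the old boolean. Below is a minimal sketch of setting that config. Only the default mode "ddp_optimizer" appears in this diff; the other mode names listed are assumptions about the remaining three modes and may not match the actual config.

```python
import torch._dynamo

# Sketch: selecting a compiled-DDP optimization mode by name.
# "ddp_optimizer" is the default wired up by the new benchmark flag below.
# The other names are assumptions about the remaining modes, not taken from this diff.
ASSUMED_MODES = (
    "ddp_optimizer",                            # graph-break-based DDPOptimizer (default)
    "python_reducer",                           # assumed
    "python_reducer_without_compiled_forward",  # assumed
    "no_optimization",                          # assumed: turn the optimizer off entirely
)

mode = "ddp_optimizer"
assert mode in ASSUMED_MODES
torch._dynamo.config.optimize_ddp = mode  # replaces the old True/False toggle
```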
@@ -2986,9 +2986,10 @@ def parse_args(args=None):
         """,
     )
     parser.add_argument(
-        "--no-optimize-ddp",
-        action="store_true",
-        help="Disables dynamo DDPOptimizer (graph breaks). (Applies only when using --ddp benchmark mode).",
+        "--optimize-ddp-mode",
+        type=str,
+        default="ddp_optimizer",
+        help="Specify the DDP optimization mode -- the value of torch._dynamo.config.optimize_ddp.",
     )
     parser.add_argument(
         "--distributed-master-port",
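Because the old boolean flag is removed rather than aliased, any wrapper script that still passes --no-optimize-ddp needs updating. A tiny hypothetical shim for that translation is sketched below; "no_optimization" is an assumed name for the mode that disables the optimizer and is not confirmed by this diff.

```python
def legacy_to_optimize_ddp_mode(no_optimize_ddp: bool) -> str:
    # Hypothetical mapping from the removed --no-optimize-ddp boolean to the new
    # string mode; "no_optimization" is an assumed mode name, not part of this PR's diff.
    return "no_optimization" if no_optimize_ddp else "ddp_optimizer"
```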
@@ -3438,11 +3439,7 @@ def run(runner, args, original_dir=None):
         )
     if args.ddp:
         assert args.training, "DDP benchmark requires --training mode"
-        if args.no_optimize_ddp:
-            torch._dynamo.config.optimize_ddp = False
-        else:
-            # TODO(whc) after enabling DDPOptimizer by default this could be removed or assert
-            torch._dynamo.config.optimize_ddp = True
+        torch._dynamo.config.optimize_ddp = args.optimize_ddp_mode
         if args.only == "dlrm":
             log.error(
                 "DLRM+DDP is unsupported as it requires sharding the embedding layer separately from DDP"
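Putting the two hunks together, the flow is: parse the string flag, then assign it verbatim to the Dynamo config. The sketch below uses a standalone argparse parser as a stand-in for the benchmark's actual parse_args, with the same flag and default as the diff above.

```python
import argparse

import torch._dynamo

# Standalone stand-in for the benchmark's parser (not the real parse_args).
parser = argparse.ArgumentParser()
parser.add_argument("--ddp", action="store_true")
parser.add_argument("--training", action="store_true")
parser.add_argument(
    "--optimize-ddp-mode",
    type=str,
    default="ddp_optimizer",
    help="Specify the DDP optimization mode -- the value of torch._dynamo.config.optimize_ddp.",
)

args = parser.parse_args(["--ddp", "--training", "--optimize-ddp-mode", "ddp_optimizer"])
if args.ddp:
    assert args.training, "DDP benchmark requires --training mode"
    # The string mode is passed straight through; no boolean translation remains.
    torch._dynamo.config.optimize_ddp = args.optimize_ddp_mode
```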