Skip to content

Commit

Permalink
PT torch_distributed: use reasonable defaults
Browse files Browse the repository at this point in the history
  • Loading branch information
albertz committed Nov 26, 2023
1 parent 95478f8 commit edf65e4
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions returnn/torch/engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,8 +98,8 @@ def __init__(self, config: Config):
torch_distributed = config.typed_value("torch_distributed")
if torch_distributed is not None:
self._use_torch_distributed = True
self._torch_distributed_class = torch_distributed.get("class", None)
self._torch_distributed_options = torch_distributed.get("options", None)
self._torch_distributed_class = torch_distributed.get("class", DistributedDataParallel)
self._torch_distributed_options = torch_distributed.get("options", {})

import returnn.torch.distributed

Expand Down

0 comments on commit edf65e4

Please sign in to comment.