
Adjusted output for parallel
RandomDefaultUser committed Apr 26, 2024
1 parent ffa3082 commit 04b0050
Showing 1 changed file with 1 addition and 1 deletion.
2 changes: 1 addition & 1 deletion mala/network/trainer.py
@@ -45,7 +45,7 @@ def __init__(self, params, network, data, optimizer_dict=None):
         super(Trainer, self).__init__(params, network, data)

         if self.parameters_full.use_ddp:
-            print("wrapping model in ddp..")
+            printout("DDP activated, wrapping model in DDP.", min_verbosity=1)
             # JOSHR: using streams here to maintain compatibility with
             # graph capture
             s = torch.cuda.Stream()
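The change matters in a parallel (DDP) run: a bare print would emit the same line once per rank, whereas printout with min_verbosity=1 routes the message through MALA's verbosity-aware output. The sketch below illustrates how such a rank-aware, verbosity-gated helper could work; it is an assumption for illustration only, and the global verbosity constant and function body are hypothetical, not MALA's actual implementation. Only the call shown in the diff is confirmed by this commit.

# Illustrative sketch of a rank-aware, verbosity-gated print helper.
# The CURRENT_VERBOSITY constant and this body are assumptions; in MALA
# the verbosity would come from the parameters object.
import torch.distributed as dist

CURRENT_VERBOSITY = 1

def printout(*values, sep=" ", min_verbosity=0):
    """Print only if verbose enough, and only on rank 0 in a DDP run."""
    if CURRENT_VERBOSITY < min_verbosity:
        return
    # Suppress output on all ranks except rank 0 when DDP is initialized.
    if dist.is_available() and dist.is_initialized() and dist.get_rank() != 0:
        return
    print(sep.join(str(v) for v in values))

# Usage, mirroring the changed line in trainer.py:
# printout("DDP activated, wrapping model in DDP.", min_verbosity=1)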
