Commit a506073
PT effective learning rate, small fix again
albertz committed Oct 20, 2023
1 parent 41ccde0 commit a506073
Showing 1 changed file with 2 additions and 1 deletion.
returnn/torch/updater.py: 3 changes (2 additions & 1 deletion)
@@ -106,10 +106,11 @@ def __init__(self, *, config, network, device, initial_learning_rate=1.0):
                 ), "please specify **kwargs in dynamic_learning_rate for future compatibility"
             else:
                 raise NotImplementedError("not implemented for not callable dynamic_learning_rate")
-        self._update_effective_learning_rate()
 
         self.optimizer = None  # type: typing.Optional[torch.optim.Optimizer]
 
+        self._update_effective_learning_rate()
+
     def set_learning_rate(self, value):
         """
         Updates the learning rate of the optimizer at each (sub)epoch.
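Why the one-line move matters: the commit reorders the _update_effective_learning_rate() call so that it runs only after self.optimizer has been assigned. The method body is not shown in this diff, so the following is a minimal hypothetical sketch (not the actual RETURNN Updater), assuming the method reads self.optimizer:

    # Minimal sketch of the ordering bug, not the actual RETURNN Updater.
    # Assumption: _update_effective_learning_rate() reads self.optimizer,
    # so the attribute must exist before the method is first called.

    class Updater:
        def __init__(self, initial_learning_rate: float = 1.0):
            self.learning_rate = initial_learning_rate

            # Pre-fix position: calling the update here would raise
            # AttributeError, because self.optimizer is not assigned yet.
            # self._update_effective_learning_rate()

            self.optimizer = None  # replaced by a real torch.optim.Optimizer later

            # Post-fix position: self.optimizer exists (even if still None),
            # so the guarded access inside the method is safe.
            self._update_effective_learning_rate()

        def _update_effective_learning_rate(self):
            """Push the current learning rate into the optimizer, if one is set."""
            if self.optimizer is not None:
                for param_group in self.optimizer.param_groups:
                    param_group["lr"] = self.learning_rate


    Updater()  # constructs fine with the call in the post-fix position

With the call in the pre-fix position, constructing the object would fail with AttributeError ("object has no attribute 'optimizer'"); after the move, construction succeeds and the learning rate is actually pushed once an optimizer exists.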
