Commit 19350c9
allow for auto-calling ema after optimizer step using `register_step_post_hook`

lucidrains committed Oct 20, 2024
1 parent 0b59eaf commit 19350c9
Showing 2 changed files with 10 additions and 2 deletions.
ema_pytorch/ema_pytorch.py: 9 additions & 1 deletion

```diff
@@ -185,13 +185,21 @@ def init_ema(
         self.parameter_names = {name for name, param in self.ema_model.named_parameters() if torch.is_floating_point(param) or torch.is_complex(param)}
         self.buffer_names = {name for name, buffer in self.ema_model.named_buffers() if torch.is_floating_point(buffer) or torch.is_complex(buffer)}
 
+    def add_to_optimizer_post_step_hook(self, optimizer):
+        assert hasattr(optimizer, 'register_step_post_hook')
+
+        def hook(*_):
+            self.update()
+
+        optimizer.register_step_post_hook(hook)
+
     @property
     def model(self):
         return self.online_model if self.include_online_model else self.online_model[0]
 
     def eval(self):
         return self.ema_model.eval()
 
     def restore_ema_model_device(self):
         device = self.initted.device
         self.ema_model.to(device)
```
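
For context, here is a minimal sketch of how the new method can be used. The model, optimizer, and hyperparameters below are illustrative; `EMA` is this repository's class, and `register_step_post_hook` is the standard `torch.optim.Optimizer` hook that the method relies on:

```python
import torch
from torch import nn
from ema_pytorch import EMA

net = nn.Linear(512, 512)                          # illustrative model
ema = EMA(net, beta = 0.9999, update_every = 10)

opt = torch.optim.Adam(net.parameters(), lr = 3e-4)

# register once: ema.update() now runs automatically after every opt.step()
ema.add_to_optimizer_post_step_hook(opt)

for _ in range(100):
    x = torch.randn(8, 512)
    loss = net(x).sum()
    loss.backward()
    opt.step()        # post-step hook fires here; no manual ema.update() needed
    opt.zero_grad()
```

This removes the easy-to-forget manual `ema.update()` call from the training loop; the assert guards against optimizers (or older PyTorch versions) that do not expose `register_step_post_hook`.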
setup.py: 1 addition & 1 deletion

```diff
@@ -3,7 +3,7 @@
 setup(
   name = 'ema-pytorch',
   packages = find_packages(exclude=[]),
-  version = '0.7.0',
+  version = '0.7.1',
   license='MIT',
   description = 'Easy way to keep track of exponential moving average version of your pytorch module',
   author = 'Phil Wang',
```
