fix: failed to show lr in wandb
AtticusZeller committed Jul 3, 2024
1 parent 521b051 commit 378825d
Showing 1 changed file with 4 additions and 8 deletions.
12 changes: 4 additions & 8 deletions src/eval/logger.py
@@ -60,17 +60,13 @@ def log_LR(self, model: torch.nn.Module, schedulers: list, step: int):
         :param schedulers: List of schedulers corresponding to the optimizers
         :param step: Current step number
         """
-        lr_info = {}
-        scheduler_info = {}
         for i, (optimizer, scheduler) in enumerate(zip(model.optimizers, schedulers)):
             for j, param_group in enumerate(optimizer.param_groups):
                 lr = param_group["lr"]
-                name = param_group.get("name", f"optimizer_{i}_group_{j}")
-                lr_info[f"Learning Rate/{name}"] = lr
-                scheduler_info[f"Scheduler Type/{name}"] = scheduler.__class__.__name__
-
-        wandb.log(lr_info, step=step)
-        wandb.log(scheduler_info, step=step)
+                param_name = param_group.get("name", f"optimizer_{i}_group_{j}")
+                s_type = f"Sch:{scheduler.__class__.__name__}"
+                l_name = s_type + f" LR: {param_name}"
+                wandb.log({l_name: lr}, step=step)
 
     # BUG: failed to show in wandb
     def log_gradients(self, model: torch.nn.Module, step: int):

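For context, here is a minimal, self-contained sketch (not part of the commit) of how the patched logging behaves: each param group's learning rate goes out in its own `wandb.log` call under a single combined key such as `Sch:ExponentialLR LR: layer`, instead of the two separate metric dicts the old code built. `ToyModel`, the optimizer/scheduler setup, and the project name are hypothetical; as in the repo's code, it assumes `model.optimizers` is a list of optimizers paired one-to-one with `schedulers`.

```python
# Hypothetical sketch of the patched log_LR logic; not from the repository.
# Assumes model.optimizers is a list of optimizers paired 1:1 with schedulers,
# as src/eval/logger.py does. ToyModel and the project name are made up.
import torch
import wandb


class ToyModel(torch.nn.Module):
    def __init__(self) -> None:
        super().__init__()
        self.layer = torch.nn.Linear(4, 2)
        # Extra keys such as "name" are preserved on the param group by torch.
        self.optimizers = [
            torch.optim.Adam(
                [{"params": self.layer.parameters(), "name": "layer"}], lr=1e-3
            )
        ]


wandb.init(project="lr-logging-demo")  # hypothetical project
model = ToyModel()
schedulers = [
    torch.optim.lr_scheduler.ExponentialLR(model.optimizers[0], gamma=0.9)
]

for step in range(3):
    for i, (optimizer, scheduler) in enumerate(zip(model.optimizers, schedulers)):
        optimizer.step()
        scheduler.step()
        for j, param_group in enumerate(optimizer.param_groups):
            lr = param_group["lr"]
            param_name = param_group.get("name", f"optimizer_{i}_group_{j}")
            # One scalar per call, keyed e.g. "Sch:ExponentialLR LR: layer".
            l_name = f"Sch:{scheduler.__class__.__name__} LR: {param_name}"
            wandb.log({l_name: lr}, step=step)
```

Logging each scalar under its own fully qualified key gives every learning rate its own chart in the wandb UI, which, per the commit message, is what the earlier two-dict logging failed to produce.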