generated from HephaestusProject/template
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathtrain.py
80 lines (63 loc) · 2.07 KB
/
train.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
"""
This script was made by Nick at 19/07/20.
To implement code for training your model.
"""
from argparse import ArgumentParser, Namespace
import os
import pytorch_lightning as pl
from pytorch_lightning.callbacks import ModelCheckpoint
from pytorch_lightning.loggers import WandbLogger
import torch
from src.utils import Config, get_dataloader, get_exp_name
pl.seed_everything(777)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
def parse_args() -> Namespace:
    """Parse the command-line options for an autoencoder training run.

    Returns:
        Namespace with ``cfg_dataset`` and ``cfg_model`` config paths and
        the ``wandb`` logging flag.
    """
    parser = ArgumentParser(description="Run Autoencoders")
    # Paths to the YAML configs; defaults point at the MNIST dataset
    # and the plain autoencoder model.
    parser.add_argument(
        "--cfg-dataset",
        type=str,
        default="./configs/dataset/mnist.yml",
        help="select dataset",
    )
    parser.add_argument(
        "--cfg-model",
        type=str,
        default="./configs/model/AE.yml",
        help="select model",
    )
    # Opt-in flag: enable the Weights & Biases logger.
    parser.add_argument("--wandb", action="store_true", help="use wandb logger")
    return parser.parse_args()
def run(cfg: dict, use_wandb: bool):
    """Build the model, dataloaders, and trainer, then launch training.

    Args:
        cfg: merged dataset/model configuration (attribute-style access;
            presumably produced by ``Config`` — see ``__main__`` block).
        use_wandb: when True, log the run (and checkpoints) to
            Weights & Biases; otherwise no logger is attached.
    """
    # Human-readable experiment name derived from the hyperparameters.
    experiment = get_exp_name(cfg.model.params)
    logger = (
        WandbLogger(
            name=experiment,
            project="hephaestusproject-pytorch-AE",
            log_model=True,
        )
        if use_wandb
        else None
    )

    # Train/validation dataloaders for the configured dataset.
    train_loader, val_loader = get_dataloader(cfg)

    # Resolve the LightningModule class by name from the `src` package
    # and instantiate it with the model hyperparameters.
    model_cls = getattr(__import__("src"), cfg.model.name)
    runner = model_cls(cfg.model.params)

    # Make sure the checkpoint directory exists before the trainer writes to it.
    os.makedirs(cfg.model.ckpt.path, exist_ok=True)
    trainer = pl.Trainer(
        logger=logger,
        # Use every available GPU, or fall back to CPU-only.
        gpus=-1 if torch.cuda.is_available() else 0,
        max_epochs=cfg.model.params.max_epochs,
        deterministic=True,
        checkpoint_callback=ModelCheckpoint(cfg.model.ckpt.path),
    )

    trainer.fit(
        runner, train_dataloader=train_loader, val_dataloaders=val_loader
    )
if __name__ == "__main__":
args = parse_args()
cfg = Config()
cfg.add_dataset(args.cfg_dataset)
cfg.add_model(args.cfg_model)
run(cfg, args.wandb)