Skip to content

Commit

Permalink
add plots for validation score during training. Fixed setting model to eval and train modes
Browse files Browse the repository at this point in the history
  • Loading branch information
LVeefkind committed Nov 4, 2024
1 parent 480f800 commit 0f6e718
Show file tree
Hide file tree
Showing 8 changed files with 225 additions and 82 deletions.
11 changes: 11 additions & 0 deletions neural_networks/create_grid_search.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
"""Generate a grid-search parameter file for neural-network training runs.

Writes one whitespace-separated hyperparameter line per combination to
``parameters.txt``, echoing each line and the total count to stdout for
quick inspection.
"""
from itertools import product

MODEL_NAMES = ["dinov2_vitl14_reg", "dinov2_vitb14_reg", "efficientnet_v2_l"]
LR_BATCH_PAIRS = [(1e-4, 32), (1e-4, 64), (2e-4, 64), (5e-5, 32)]
DROPOUT_PS = [0.1, 0.15, 0.2, 0.25]
LABEL_SMOOTHINGS = [0.1, 0.2, 0.3]

with open("parameters.txt", "w") as f:
    count = 0
    # itertools.product replaces four nested loops; iteration order matches
    # the original nesting (model, (lr, batch), dropout, label smoothing).
    for model_name, (lr, batch_size), dropout_p, label_smoothing in product(
        MODEL_NAMES, LR_BATCH_PAIRS, DROPOUT_PS, LABEL_SMOOTHINGS
    ):
        count += 1
        # Resize is 560 for DINOv2 backbones, 0 (native size) otherwise.
        cmd = f"{model_name} {lr} 1 {dropout_p} {batch_size} {label_smoothing} 0 0 16 16 {560 if 'dinov2' in model_name else 0} stack 0\n"
        print(cmd)
        # f.write, not f.writelines: cmd is a single string, not a list of
        # lines (writelines on a str only worked by iterating its characters).
        f.write(cmd)
print(count)
223 changes: 144 additions & 79 deletions neural_networks/parameters.txt
Original file line number Diff line number Diff line change
@@ -1,79 +1,144 @@
dinov2_vitl14_reg 1e-04 1 0.25 32 0.1 0 0 16 16 560
dinov2_vitl14_reg 1e-04 1 0.25 32 0.1 1 1 16 16 560
dinov2_vitl14_reg 1e-04 1 0.25 32 0.2 1 0 16 16 560
dinov2_vitl14_reg 1e-04 1 0.25 32 0.2 0 1 16 16 560

dinov2_vitl14_reg 1e-04 1 0.1 32 0.1 0 0 16 16 560
dinov2_vitl14_reg 1e-04 1 0.1 32 0.1 1 1 16 16 560
dinov2_vitl14_reg 1e-04 1 0.1 32 0.2 1 0 16 16 560
dinov2_vitl14_reg 1e-04 1 0.1 32 0.2 0 1 16 16 560

dinov2_vitl14_reg 1e-05 1 0.25 32 0.1 0 0 16 16 560
dinov2_vitl14_reg 1e-05 1 0.25 32 0.1 1 1 16 16 560
dinov2_vitl14_reg 1e-05 1 0.25 32 0.2 1 0 16 16 560
dinov2_vitl14_reg 1e-05 1 0.25 32 0.2 0 1 16 16 560

dinov2_vitl14_reg 1e-05 1 0.1 32 0.1 0 0 16 16 560
dinov2_vitl14_reg 1e-05 1 0.1 32 0.1 1 1 16 16 560
dinov2_vitl14_reg 1e-05 1 0.1 32 0.2 1 0 16 16 560
dinov2_vitl14_reg 1e-05 1 0.1 32 0.2 0 1 16 16 560

dinov2_vitl14_reg 5e-05 1 0.25 32 0.1 0 0 16 16 560
dinov2_vitl14_reg 5e-05 1 0.25 32 0.1 1 1 16 16 560
dinov2_vitl14_reg 5e-05 1 0.25 32 0.2 1 0 16 16 560
dinov2_vitl14_reg 5e-05 1 0.25 32 0.2 0 1 16 16 560

dinov2_vitl14_reg 5e-05 1 0.1 32 0.1 0 0 16 16 560
dinov2_vitl14_reg 5e-05 1 0.1 32 0.1 1 1 16 16 560
dinov2_vitl14_reg 5e-05 1 0.1 32 0.2 1 0 16 16 560
dinov2_vitl14_reg 5e-05 1 0.1 32 0.2 0 1 16 16 560

efficientnet_v2_l 1e-04 1 0.25 32 0.2 0 0 16 16 0
efficientnet_v2_l 1e-04 1 0.25 32 0.2 1 0 16 16 0
efficientnet_v2_l 1e-04 1 0.25 32 0.1 1 0 16 16 0
efficientnet_v2_l 1e-04 1 0.25 32 0.1 0 0 16 16 0

efficientnet_v2_l 1e-04 1 0.1 32 0.2 0 0 16 16 0
efficientnet_v2_l 1e-04 1 0.1 32 0.2 1 0 16 16 0
efficientnet_v2_l 1e-04 1 0.1 32 0.1 1 0 16 16 0
efficientnet_v2_l 1e-04 1 0.1 32 0.1 0 0 16 16 0

efficientnet_v2_l 5e-05 1 0.25 32 0.2 0 0 16 16 0
efficientnet_v2_l 5e-05 1 0.25 32 0.2 1 0 16 16 0
efficientnet_v2_l 5e-05 1 0.25 32 0.1 1 0 16 16 0
efficientnet_v2_l 5e-05 1 0.25 32 0.1 0 0 16 16 0

efficientnet_v2_l 5e-05 1 0.1 32 0.2 0 0 16 16 0
efficientnet_v2_l 5e-05 1 0.1 32 0.2 1 0 16 16 0
efficientnet_v2_l 5e-05 1 0.1 32 0.1 1 0 16 16 0
efficientnet_v2_l 5e-05 1 0.1 32 0.1 0 0 16 16 0

efficientnet_v2_l 1e-05 1 0.25 32 0.2 0 0 16 16 0
efficientnet_v2_l 1e-05 1 0.25 32 0.2 1 0 16 16 0
efficientnet_v2_l 1e-05 1 0.25 32 0.1 1 0 16 16 0
efficientnet_v2_l 1e-05 1 0.25 32 0.1 0 0 16 16 0

efficientnet_v2_l 1e-05 1 0.1 32 0.2 0 0 16 16 0
efficientnet_v2_l 1e-05 1 0.1 32 0.2 1 0 16 16 0
efficientnet_v2_l 1e-05 1 0.1 32 0.1 1 0 16 16 0
efficientnet_v2_l 1e-05 1 0.1 32 0.1 0 0 16 16 0

dinov2_vitl14_reg 1e-04 1 0.25 32 0.1 0 0 16 16 560 conv 0
dinov2_vitl14_reg 1e-04 1 0.1 32 0.1 0 0 16 16 560 conv 0
dinov2_vitl14_reg 1e-04 1 0.25 32 0.1 0 0 16 16 560 conv 1
dinov2_vitl14_reg 1e-04 1 0.1 32 0.1 0 0 16 16 560 conv 1
dinov2_vitl14_reg 1e-04 1 0.25 32 0.1 0 0 16 16 560 stack 0
dinov2_vitl14_reg 1e-04 1 0.1 32 0.1 0 0 16 16 560 stack 0
efficientnet_v2_l 1e-04 1 0.1 32 0.2 0 0 16 16 0 stack 0
dinov2_vitl14_reg 1e-04 1 0.25 32 0.1 0 0 16 16 560 stack 1
dinov2_vitl14_reg 1e-04 1 0.1 32 0.1 0 0 16 16 560 stack 1
efficientnet_v2_l 1e-04 1 0.1 32 0.2 0 0 16 16 0 stack 1
dinov2_vitl14_reg 1e-04 1 0.25 32 0.1 0 1 16 16 560 conv 0
dinov2_vitl14_reg 1e-04 1 0.1 32 0.1 0 1 16 16 560 conv 0
dinov2_vitl14_reg 1e-04 1 0.25 32 0.1 0 1 16 16 560 conv 1
dinov2_vitl14_reg 1e-04 1 0.1 32 0.1 0 1 16 16 560 conv 1





dinov2_vitl14_reg 0.0001 1 0.1 32 0.1 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.1 32 0.2 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.1 32 0.3 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.15 32 0.1 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.15 32 0.2 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.15 32 0.3 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.2 32 0.1 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.2 32 0.2 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.2 32 0.3 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.25 32 0.1 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.25 32 0.2 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.25 32 0.3 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.1 64 0.1 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.1 64 0.2 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.1 64 0.3 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.15 64 0.1 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.15 64 0.2 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.15 64 0.3 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.2 64 0.1 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.2 64 0.2 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.2 64 0.3 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.25 64 0.1 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.25 64 0.2 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0001 1 0.25 64 0.3 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0002 1 0.1 64 0.1 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0002 1 0.1 64 0.2 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0002 1 0.1 64 0.3 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0002 1 0.15 64 0.1 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0002 1 0.15 64 0.2 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0002 1 0.15 64 0.3 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0002 1 0.2 64 0.1 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0002 1 0.2 64 0.2 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0002 1 0.2 64 0.3 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0002 1 0.25 64 0.1 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0002 1 0.25 64 0.2 0 0 16 16 560 stack 0
dinov2_vitl14_reg 0.0002 1 0.25 64 0.3 0 0 16 16 560 stack 0
dinov2_vitl14_reg 5e-05 1 0.1 32 0.1 0 0 16 16 560 stack 0
dinov2_vitl14_reg 5e-05 1 0.1 32 0.2 0 0 16 16 560 stack 0
dinov2_vitl14_reg 5e-05 1 0.1 32 0.3 0 0 16 16 560 stack 0
dinov2_vitl14_reg 5e-05 1 0.15 32 0.1 0 0 16 16 560 stack 0
dinov2_vitl14_reg 5e-05 1 0.15 32 0.2 0 0 16 16 560 stack 0
dinov2_vitl14_reg 5e-05 1 0.15 32 0.3 0 0 16 16 560 stack 0
dinov2_vitl14_reg 5e-05 1 0.2 32 0.1 0 0 16 16 560 stack 0
dinov2_vitl14_reg 5e-05 1 0.2 32 0.2 0 0 16 16 560 stack 0
dinov2_vitl14_reg 5e-05 1 0.2 32 0.3 0 0 16 16 560 stack 0
dinov2_vitl14_reg 5e-05 1 0.25 32 0.1 0 0 16 16 560 stack 0
dinov2_vitl14_reg 5e-05 1 0.25 32 0.2 0 0 16 16 560 stack 0
dinov2_vitl14_reg 5e-05 1 0.25 32 0.3 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.1 32 0.1 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.1 32 0.2 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.1 32 0.3 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.15 32 0.1 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.15 32 0.2 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.15 32 0.3 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.2 32 0.1 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.2 32 0.2 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.2 32 0.3 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.25 32 0.1 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.25 32 0.2 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.25 32 0.3 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.1 64 0.1 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.1 64 0.2 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.1 64 0.3 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.15 64 0.1 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.15 64 0.2 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.15 64 0.3 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.2 64 0.1 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.2 64 0.2 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.2 64 0.3 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.25 64 0.1 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.25 64 0.2 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0001 1 0.25 64 0.3 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0002 1 0.1 64 0.1 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0002 1 0.1 64 0.2 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0002 1 0.1 64 0.3 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0002 1 0.15 64 0.1 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0002 1 0.15 64 0.2 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0002 1 0.15 64 0.3 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0002 1 0.2 64 0.1 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0002 1 0.2 64 0.2 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0002 1 0.2 64 0.3 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0002 1 0.25 64 0.1 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0002 1 0.25 64 0.2 0 0 16 16 560 stack 0
dinov2_vitb14_reg 0.0002 1 0.25 64 0.3 0 0 16 16 560 stack 0
dinov2_vitb14_reg 5e-05 1 0.1 32 0.1 0 0 16 16 560 stack 0
dinov2_vitb14_reg 5e-05 1 0.1 32 0.2 0 0 16 16 560 stack 0
dinov2_vitb14_reg 5e-05 1 0.1 32 0.3 0 0 16 16 560 stack 0
dinov2_vitb14_reg 5e-05 1 0.15 32 0.1 0 0 16 16 560 stack 0
dinov2_vitb14_reg 5e-05 1 0.15 32 0.2 0 0 16 16 560 stack 0
dinov2_vitb14_reg 5e-05 1 0.15 32 0.3 0 0 16 16 560 stack 0
dinov2_vitb14_reg 5e-05 1 0.2 32 0.1 0 0 16 16 560 stack 0
dinov2_vitb14_reg 5e-05 1 0.2 32 0.2 0 0 16 16 560 stack 0
dinov2_vitb14_reg 5e-05 1 0.2 32 0.3 0 0 16 16 560 stack 0
dinov2_vitb14_reg 5e-05 1 0.25 32 0.1 0 0 16 16 560 stack 0
dinov2_vitb14_reg 5e-05 1 0.25 32 0.2 0 0 16 16 560 stack 0
dinov2_vitb14_reg 5e-05 1 0.25 32 0.3 0 0 16 16 560 stack 0
efficientnet_v2_l 0.0001 1 0.1 32 0.1 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.1 32 0.2 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.1 32 0.3 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.15 32 0.1 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.15 32 0.2 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.15 32 0.3 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.2 32 0.1 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.2 32 0.2 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.2 32 0.3 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.25 32 0.1 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.25 32 0.2 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.25 32 0.3 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.1 64 0.1 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.1 64 0.2 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.1 64 0.3 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.15 64 0.1 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.15 64 0.2 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.15 64 0.3 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.2 64 0.1 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.2 64 0.2 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.2 64 0.3 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.25 64 0.1 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.25 64 0.2 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0001 1 0.25 64 0.3 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0002 1 0.1 64 0.1 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0002 1 0.1 64 0.2 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0002 1 0.1 64 0.3 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0002 1 0.15 64 0.1 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0002 1 0.15 64 0.2 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0002 1 0.15 64 0.3 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0002 1 0.2 64 0.1 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0002 1 0.2 64 0.2 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0002 1 0.2 64 0.3 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0002 1 0.25 64 0.1 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0002 1 0.25 64 0.2 0 0 16 16 0 stack 0
efficientnet_v2_l 0.0002 1 0.25 64 0.3 0 0 16 16 0 stack 0
efficientnet_v2_l 5e-05 1 0.1 32 0.1 0 0 16 16 0 stack 0
efficientnet_v2_l 5e-05 1 0.1 32 0.2 0 0 16 16 0 stack 0
efficientnet_v2_l 5e-05 1 0.1 32 0.3 0 0 16 16 0 stack 0
efficientnet_v2_l 5e-05 1 0.15 32 0.1 0 0 16 16 0 stack 0
efficientnet_v2_l 5e-05 1 0.15 32 0.2 0 0 16 16 0 stack 0
efficientnet_v2_l 5e-05 1 0.15 32 0.3 0 0 16 16 0 stack 0
efficientnet_v2_l 5e-05 1 0.2 32 0.1 0 0 16 16 0 stack 0
efficientnet_v2_l 5e-05 1 0.2 32 0.2 0 0 16 16 0 stack 0
efficientnet_v2_l 5e-05 1 0.2 32 0.3 0 0 16 16 0 stack 0
efficientnet_v2_l 5e-05 1 0.25 32 0.1 0 0 16 16 0 stack 0
efficientnet_v2_l 5e-05 1 0.25 32 0.2 0 0 16 16 0 stack 0
efficientnet_v2_l 5e-05 1 0.25 32 0.3 0 0 16 16 0 stack 0
Binary file not shown.
Binary file not shown.
61 changes: 61 additions & 0 deletions neural_networks/plots/score_plots.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
import matplotlib.pyplot as plt
from tensorboard.backend.event_processing import event_accumulator


def plot_tensorboard_timeseries(event_file, tag, epochs=120, steps_per_epoch=54):
    """Extract a scalar time series from a TensorBoard event file.

    Despite the name this function only loads data; the actual plotting
    happens in ``plot_scores``.

    Args:
        event_file: Path to a TensorBoard ``events.out.tfevents.*`` file.
        tag: Scalar tag to extract (e.g. ``"au_pr_curve/validation"``).
        epochs: Keep at most this many leading events.
        steps_per_epoch: Divisor converting raw global steps to epoch
            numbers. Defaults to 54, the previously hard-coded value —
            presumably the batches-per-epoch of the original runs; confirm
            for other datasets/batch sizes.

    Returns:
        Tuple ``(steps, values, total_time)``: epoch numbers, scalar
        values, and the wall-clock seconds spanned by the kept events.

    Raises:
        ValueError: If ``tag`` is not among the file's scalar tags.
    """
    # Load the TensorBoard event file.
    ea = event_accumulator.EventAccumulator(event_file)
    ea.Reload()

    if tag not in ea.Tags()["scalars"]:
        raise ValueError(f"Tag '{tag}' not found in the event file.")

    # Clamp before indexing: the original indexed events[epochs - 1] on the
    # full list, which raised IndexError for runs shorter than `epochs`.
    events = ea.Scalars(tag)[:epochs]
    steps = [e.step / steps_per_epoch for e in events]
    values = [e.value for e in events]
    total_time = events[-1].wall_time - events[0].wall_time
    return steps, values, total_time


def plot_scores(stepsLists, valueLists, wall_times, models):
    """Plot validation score curves for several models in one figure.

    Prints each model's total wall-clock training time and saves the
    combined plot to ``au_pr_score.png``.

    Args:
        stepsLists: Per-model sequences of epoch numbers (x values).
        valueLists: Per-model sequences of scores (y values).
        wall_times: Per-model total wall-clock seconds (printed only).
        models: Per-model legend labels.
    """
    plt.figure(figsize=(10, 5))
    for model_steps, value_values, wall_time, model in zip(
        stepsLists, valueLists, wall_times, models
    ):
        print(
            f"{model} wall time was {wall_time:.0f} seconds ({wall_time/3600:.2f} hours)"
        )
        plt.plot(model_steps, value_values, label=model)
    plt.xlabel("Epoch")
    plt.ylabel("Area under PR-curve")
    plt.legend()
    plt.grid(True)
    plt.savefig("au_pr_score.png")
    # Release the figure so repeated calls don't accumulate open figures.
    plt.close()


# Map display names to the TensorBoard event files to compare.
# Update these paths to point at your own runs.
event_file_dict = {
    "DinoV2": "event_files/events.out.tfevents.1730291782.gcn30.local.snellius.surf.nl.2180618.0",
    "EfficientNet": "event_files/events.out.tfevents.1730302269.gcn30.local.snellius.surf.nl.2222324.0",
}

# Extract (steps, values, wall_time, name) per model, then transpose the
# per-model tuples into the parallel sequences plot_scores expects.
stepsLists, valuesLists, wall_times, models = zip(
    *[
        (
            *plot_tensorboard_timeseries(
                event_file_dict[model], "au_pr_curve/validation", epochs=120
            ),
            model,
        )
        for model in event_file_dict
    ]
)

plot_scores(stepsLists, valuesLists, wall_times, models)
3 changes: 2 additions & 1 deletion neural_networks/pre_processing_for_ml.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,7 @@ def process_fits(fits_path):
for fits_path in (root_dir / (cls + mode)).glob("*.fits")
)


class FitsDataset(Dataset):
def __init__(self, root_dir, mode="train"):
"""
Expand Down Expand Up @@ -168,7 +169,7 @@ def __init__(self, root_dir, mode="train"):
# print(f'{mode}: using the following sources: {sources}')

def compute_statistics(self, normalize):
cache = Memory(location=self.root_dir / '_cache')
cache = Memory(location=self.root_dir / "_cache")
cached_compute = cache.cache(FitsDataset._compute_statistics)
self.mean, self.std = cached_compute(self, normalize)
return self.mean, self.std
Expand Down
6 changes: 5 additions & 1 deletion neural_networks/train_nn.job
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
#!/bin/bash
#SBATCH --job-name=cortex_grid_search
#SBATCH -p gpu_h100
#SBATCH -p gpu
#SBATCH -t 08:00:00
#SBATCH --gpus 1
#SBATCH --output=out/multi_cortex%A_%a.out
Expand Down Expand Up @@ -52,8 +52,12 @@ else
EPOCHS="120"
fi

DATA_INPUT_PATH="/scratch-shared/CORTEX/"
# find $DATA_INPUT_PATH -name '*npz' | xargs -n 1 -P 18 -i rsync -R {} '/dev/shm/'

DATA_TRAINDATA_PATH="/scratch-shared/CORTEX/public.spider.surfsara.nl/lofarvwf/jdejong/CORTEX/calibrator_selection_robertjan/cnn_data/"


# Execute your Python script with the given parameters
echo $DATA_TRAINDATA_PATH --model $model --lr $lr --normalize $normalize --dropout_p $dropout_p --batch_size $batch_size --log_path grid_search_2 --label_smoothing $label_smoothing --rank $rank --resize $resize --alpha $alpha $LORA_ARG $STOCHASTIC_SMOOTHING -d --epochs $EPOCHS --lift $lift $FLIP_AUGMENTATIONS
python train_nn.py $DATA_TRAINDATA_PATH --model $model --lr $lr --normalize $normalize --dropout_p $dropout_p --batch_size $batch_size --log_path grid_search_2 --label_smoothing $label_smoothing --rank $rank --resize $resize --alpha $alpha $LORA_ARG $STOCHASTIC_SMOOTHING -d --epochs $EPOCHS --lift $lift $FLIP_AUGMENTATIONS
3 changes: 2 additions & 1 deletion neural_networks/train_nn.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,7 @@ def init_first_conv(conv):

def get_classifier(dropout_p: float, n_features: int, num_target_classes: int):
assert 0 <= dropout_p <= 1
print(n_features)

classifier = nn.Sequential(
nn.Flatten(),
Expand Down Expand Up @@ -800,7 +801,7 @@ def get_argparser():
"--normalize",
type=int,
help="Whether to do normalization",
default=0,
default=1,
choices=[0, 1, 2],
)
parser.add_argument(
Expand Down

0 comments on commit 0f6e718

Please sign in to comment.