improve transport; add logger
mieskolainen committed Jul 26, 2024
1 parent 89628c9 commit d32e828
Showing 10 changed files with 404 additions and 246 deletions.
3 changes: 3 additions & 0 deletions .gitignore
@@ -1,3 +1,5 @@
# Files ignored in GIT

*.pdf
*.pth
*.dat
@@ -8,6 +10,7 @@
*.pt
*.zip
*.text
*.log
#*.json
#*.txt

10 changes: 6 additions & 4 deletions configs/zee/models.yml
@@ -176,11 +176,13 @@ iceboost_swd:

# Sliced Wasserstein distance [use with custom:binary_cross_entropy and custom:sliced_wasserstein]
SWD_param:
- beta: 5.0e-3
+ beta: 3.0e-2
p: 1 # p-norm (1,2, ...)
num_slices: 500 # Number of MC projections (Higher the better)
mode: 'SWD' # 'SWD' (basic), 'EBSW' (see icefit/transport.py)
max_N: 400000 # Max events limit (400k & 500 slices works with 32 GB Nvidia V100)
vars: ['.*'] # Use all variables in SWD
#vars: ['fixedGridRhoAll', 'probe_eta', 'probe_pt'] # Use specific ones
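
The SWD_param block above configures a sliced Wasserstein distance (SWD) regularizer used together with binary cross entropy (per the custom:binary_cross_entropy / custom:sliced_wasserstein note). As a rough illustration of what the p-norm and num_slices parameters control, here is a minimal sketch; the function name and interface are hypothetical and this is not the icefit/transport.py implementation:

```python
# Minimal sliced Wasserstein distance sketch following the SWD_param settings
# above (p-norm, num_slices). Illustration only; the actual implementation
# lives in icefit/transport.py.
import torch

def sliced_wasserstein(X: torch.Tensor, Y: torch.Tensor,
                       p: int = 1, num_slices: int = 500) -> torch.Tensor:
    """X, Y: [N, D] samples with equal N (unequal N would need quantile interpolation)."""
    D = X.shape[1]

    # Random unit projection directions (Monte Carlo slices)
    theta = torch.randn(num_slices, D, device=X.device, dtype=X.dtype)
    theta = theta / theta.norm(dim=1, keepdim=True)

    # Project to 1D and sort: sorting solves 1D optimal transport exactly
    X_proj, _ = torch.sort(X @ theta.T, dim=0)  # [N, num_slices]
    Y_proj, _ = torch.sort(Y @ theta.T, dim=0)  # [N, num_slices]

    # Average p-norm transport cost over events and slices
    return (X_proj - Y_proj).abs().pow(p).mean().pow(1.0 / p)

# Usage with the config above: total loss ~ BCE + beta * SWD
# loss = bce_loss + 3.0e-2 * sliced_wasserstein(X_mc, X_data, p=1, num_slices=500)
```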


# ICEBOOST with an additional re-weighting in-the-loop regularization
@@ -249,7 +251,7 @@ lzmlp0: &LZMLP
SWD_beta: 5.0e-3 # Sliced Wasserstein [reweighting regularization]
SWD_p: 1 # p-norm (1,2,..), 1 perhaps more robust
SWD_num_slices: 10000 # Number of MC projections (higher the better)
- SWD_mode: 'SWD' # 'SWD' (basic), 'EBSW' (see icefit/transport.py)
+ SWD_mode: 'SWD' # 'SWD' (basic)

lipschitz_beta: 5.0e-5 # lipschitz regularization (use with 'lzmlp')
#logit_L1_beta: 1.0e-2 # logit norm reg. ~ beta * torch.sum(|logits|)
@@ -337,7 +339,7 @@ fastkan0: &FASTKAN
SWD_beta: 5.0e-3 # Sliced Wasserstein [reweighting regularization]
SWD_p: 1 # p-norm (1,2,..), 1 perhaps more robust
SWD_num_slices: 10000 # Number of MC projections (higher the better)
- SWD_mode: 'SWD' # 'SWD' (basic), 'EBSW' (see icefit/transport.py)
+ SWD_mode: 'SWD' # 'SWD' (basic)

#lipshitz_beta: 1.0e-4 # Lipschitz regularization (use with 'lzmlp')
#logit_L1_beta: 1.0e-2 # logit norm reg. ~ beta * torch.sum(|logits|)
@@ -429,7 +431,7 @@ dmlp0: &DMLP
SWD_beta: 5.0e-3 # Sliced Wasserstein [reweighting regularization]
SWD_p: 1 # p-norm (1,2,..), 1 perhaps more robust
SWD_num_slices: 10000 # Number of MC projections (higher the better)
- SWD_mode: 'SWD' # 'SWD' (basic), 'EBSW' (see icefit/transport.py)
+ SWD_mode: 'SWD' # 'SWD' (basic)

#logit_L1_beta: 1.0e-2 # logit norm reg. ~ lambda * torch.sum(|logits|)
logit_L2_beta: 5.0e-3 # logit norm reg. ~ lambda * torch.sum(logits**2)
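
The logit_L2_beta entry above adds an L2 logit-norm penalty to the training loss, as its inline formula indicates (the L1 variant is shown commented out). A minimal sketch of the L2 term, with a hypothetical helper name and not the actual icenet loss code:

```python
import torch

def logit_l2_penalty(logits: torch.Tensor, beta: float = 5.0e-3) -> torch.Tensor:
    # ~ beta * torch.sum(logits**2), mirroring the logit_L2_beta comment above
    return beta * torch.sum(logits ** 2)

# e.g. loss = bce_loss + logit_l2_penalty(logits, beta=5.0e-3)
```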