Skip to content

Commit

Permalink
Moved initialization of the Optuna study to `__init__`
Browse files Browse the repository at this point in the history
  • Loading branch information
RichieHakim committed Mar 22, 2024
1 parent f24e34b commit 162f6b8
Showing 1 changed file with 25 additions and 26 deletions.
51 changes: 25 additions & 26 deletions bnpm/automatic_regression.py
Original file line number Diff line number Diff line change
Expand Up @@ -202,6 +202,31 @@ def safe_callback(study, trial):
else:
self.callback_wandb = None

# Set verbosity
if int(self.verbose) <= 1:
optuna.logging.set_verbosity(optuna.logging.WARNING)
elif int(self.verbose) == 2:
optuna.logging.set_verbosity(optuna.logging.INFO)
elif int(self.verbose) > 2:
optuna.logging.set_verbosity(optuna.logging.DEBUG)

# Initialize an Optuna study
if self.optuna_storage_url is not None:
storage = optuna.storages.RDBStorage(
url=self.optuna_storage_url,
engine_kwargs=self.optuna_engine_kwargs,
)
else:
storage = None
self.study = optuna.create_study(
direction="minimize",
pruner=optuna.pruners.MedianPruner(n_startup_trials=self.n_startup),
sampler=optuna.samplers.TPESampler(n_startup_trials=self.n_startup),
study_name='Autotuner' if self.optuna_storage_name is None else self.optuna_storage_name,
storage=storage,
load_if_exists=True,
)

# Initialize variables to store loss and best model
self.loss_running_train = []
self.loss_running_test = []
Expand Down Expand Up @@ -338,31 +363,6 @@ def fit(self) -> Union[sklearn.base.BaseEstimator, Optional[Dict[str, Any]]]:
best_params (Optional[Dict[str, Any]]):
The best parameters obtained from hyperparameter tuning.
"""
# Set verbosity
if int(self.verbose) <= 1:
optuna.logging.set_verbosity(optuna.logging.WARNING)
elif int(self.verbose) == 2:
optuna.logging.set_verbosity(optuna.logging.INFO)
elif int(self.verbose) > 2:
optuna.logging.set_verbosity(optuna.logging.DEBUG)

# Initialize an Optuna study
if self.optuna_storage_url is not None:
storage = optuna.storages.RDBStorage(
url=self.optuna_storage_url,
engine_kwargs=self.optuna_engine_kwargs,
)
else:
storage = None
self.study = optuna.create_study(
direction="minimize",
pruner=optuna.pruners.MedianPruner(n_startup_trials=self.n_startup),
sampler=optuna.samplers.TPESampler(n_startup_trials=self.n_startup),
study_name='Autotuner' if self.optuna_storage_name is None else self.optuna_storage_name,
storage=storage,
load_if_exists=True,
)

# Optimize the study
callbacks = [self.checker.check] + ([self.callback_wandb] if self.callback_wandb is not None else [])
self.study.optimize(
Expand Down Expand Up @@ -733,7 +733,6 @@ def __init__(
else:
raise ValueError('test_or_train must be either "test" or "train".')


def explainable_variance_ratio(self, v1, v2, sample_weight=None):
if isinstance(v1, torch.Tensor):
v1 = v1 - torch.nanmean(v1, dim=0)
Expand Down

0 comments on commit 162f6b8

Please sign in to comment.