Skip to content

Commit

Permalink
Merge pull request #146 from alan-turing-institute/rename
Browse files Browse the repository at this point in the history
Rename `grid_search` and `param_grid`
  • Loading branch information
mastoffel authored Feb 6, 2024
2 parents 845c41c + 6e00d8a commit d6adb8b
Show file tree
Hide file tree
Showing 15 changed files with 186 additions and 186 deletions.
26 changes: 13 additions & 13 deletions autoemulate/compare.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,9 +32,9 @@ def setup(
self,
X,
y,
use_grid_search=False,
grid_search_type="random",
grid_search_iters=20,
param_search=False,
param_search_type="random",
param_search_iters=20,
scale=True,
scaler=StandardScaler(),
reduce_dim=False,
Expand All @@ -53,13 +53,13 @@ def setup(
Simulation input.
y : array-like, shape (n_samples, n_outputs)
Simulation output.
use_grid_search : bool
param_search : bool
        Whether to perform hyperparameter search over predefined parameter grids.
grid_search_type : str
param_search_type : str
Type of hyperparameter search to perform. Can be "grid", "random", or "bayes".
grid_search_iters : int
param_search_iters : int
Number of parameter settings that are sampled. Only used if
use_grid_search=True and grid_search_type="random".
param_search=True and param_search_type="random".
scale : bool, default=True
Whether to scale the data before fitting the models using a scaler.
scaler : sklearn.preprocessing.StandardScaler
Expand Down Expand Up @@ -99,9 +99,9 @@ def setup(
)
self.metrics = self._get_metrics(METRIC_REGISTRY)
self.cv = self._get_cv(CV_REGISTRY, fold_strategy, folds)
self.use_grid_search = use_grid_search
self.search_type = grid_search_type
self.grid_search_iters = grid_search_iters
self.param_search = param_search
self.search_type = param_search_type
self.param_search_iters = param_search_iters
self.scale = scale
self.scaler = scaler
self.n_jobs = n_jobs
Expand Down Expand Up @@ -185,15 +185,15 @@ def compare(self):

for i in range(len(self.models)):
# hyperparameter search
if self.use_grid_search:
if self.param_search:
self.models[i] = optimize_params(
X=self.X,
y=self.y,
cv=self.cv,
model=self.models[i],
search_type=self.search_type,
niter=self.grid_search_iters,
param_grid=None,
niter=self.param_search_iters,
param_space=None,
n_jobs=self.n_jobs,
logger=self.logger,
)
Expand Down
10 changes: 5 additions & 5 deletions autoemulate/emulators/gaussian_process.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,19 +68,19 @@ def predict(self, X, return_std=False):

def get_grid_params(self, search_type="random"):
"""Returns the grid parameters of the emulator."""
param_grid_random = {
param_space_random = {
"nugget": ["fit", "adaptive", "pivot"],
}
param_grid_bayes = {
param_space_bayes = {
"nugget": Categorical(["fit", "adaptive", "pivot"]),
}

if search_type == "random":
param_grid = param_grid_random
param_space = param_space_random
elif search_type == "bayes":
param_grid = param_grid_bayes
param_space = param_space_bayes

return param_grid
return param_space

def _more_tags(self):
return {"multioutput": False}
10 changes: 5 additions & 5 deletions autoemulate/emulators/gaussian_process_sk.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ def predict(self, X, return_std=False):

def get_grid_params(self, search_type="random"):
"""Returns the grid parameters of the emulator."""
param_grid_random = {
param_space_random = {
"kernel": [
RBF(),
Matern(),
Expand All @@ -106,19 +106,19 @@ def get_grid_params(self, search_type="random"):
"alpha": loguniform(1e-10, 1e-2),
"normalize_y": [True],
}
param_grid_bayes = {
param_space_bayes = {
# "kernel": Categorical([RBF(), Matern()]), # unhashable type
"optimizer": Categorical(["fmin_l_bfgs_b"]),
"alpha": Real(1e-10, 1e-2, prior="log-uniform"),
"normalize_y": Categorical([True]),
}

if search_type == "random":
param_grid = param_grid_random
param_space = param_space_random
elif search_type == "bayes":
param_grid = param_grid_bayes
param_space = param_space_bayes

return param_grid
return param_space

def _more_tags(self):
return {"multioutput": True}
10 changes: 5 additions & 5 deletions autoemulate/emulators/gradient_boosting.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ def predict(self, X):

def get_grid_params(self, search_type="random"):
"""Returns the grid parameters of the emulator."""
param_grid_random = {
param_space_random = {
"learning_rate": loguniform(0.01, 0.2),
"n_estimators": randint(100, 500),
"max_depth": randint(3, 8),
Expand All @@ -112,7 +112,7 @@ def get_grid_params(self, search_type="random"):
"ccp_alpha": loguniform(0.01, 0.1),
}

param_grid_bayes = {
param_space_bayes = {
"learning_rate": Real(0.01, 0.2, prior="log-uniform"),
"n_estimators": Integer(100, 500),
"max_depth": Integer(3, 8),
Expand All @@ -124,11 +124,11 @@ def get_grid_params(self, search_type="random"):
}

if search_type == "random":
param_grid = param_grid_random
param_space = param_space_random
elif search_type == "bayes":
param_grid = param_grid_bayes
param_space = param_space_bayes

return param_grid
return param_space

def _more_tags(self):
return {"multioutput": False}
10 changes: 5 additions & 5 deletions autoemulate/emulators/neural_net_sk.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ def predict(self, X):

def get_grid_params(self, search_type="random"):
"""Returns the grid parameters of the emulator."""
param_grid_random = {
param_space_random = {
"hidden_layer_sizes": [
(50,),
(100,),
Expand All @@ -112,7 +112,7 @@ def get_grid_params(self, search_type="random"):
"learning_rate_init": loguniform(1e-4, 1e-2),
}

param_grid_bayes = {
param_space_bayes = {
# doesn't work with bayes
# "hidden_layer_sizes": Categorical([
# (50,),
Expand All @@ -128,11 +128,11 @@ def get_grid_params(self, search_type="random"):
}

if search_type == "random":
param_grid = param_grid_random
param_space = param_space_random
elif search_type == "bayes":
param_grid = param_grid_bayes
param_space = param_space_bayes

return param_grid
return param_space

def _more_tags(self):
return {"multioutput": True}
Expand Down
10 changes: 5 additions & 5 deletions autoemulate/emulators/neural_net_torch.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ def initialize_module(self, reason=None):
return self

def get_grid_params(self, search_type="random"):
param_grid_random = {
param_space_random = {
"lr": loguniform(1e-4, 1e-2),
"max_epochs": [10, 20, 30],
"module__hidden_sizes": [
Expand All @@ -153,17 +153,17 @@ def get_grid_params(self, search_type="random"):
],
}

param_grid_bayes = {
param_space_bayes = {
"lr": Real(1e-4, 1e-2, prior="log-uniform"),
"max_epochs": Integer(10, 30),
}

if search_type == "random":
param_grid = param_grid_random
param_space = param_space_random
elif search_type == "bayes":
param_grid = param_grid_bayes
param_space = param_space_bayes

return param_grid
return param_space

def __sklearn_is_fitted__(self):
return hasattr(self, "n_features_in_")
Expand Down
6 changes: 3 additions & 3 deletions autoemulate/emulators/polynomials.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,11 +82,11 @@ def get_grid_params(self, search_type="random"):
The parameter grid for the model.
"""
if search_type == "random":
param_grid = {}
param_space = {}
elif search_type == "bayes":
param_grid = [({"degree": Categorical([2])}, 1)]
param_space = [({"degree": Categorical([2])}, 1)]

return param_grid
return param_space

def _more_tags(self):
return {"multioutput": True}
10 changes: 5 additions & 5 deletions autoemulate/emulators/random_forest.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ def predict(self, X):
def get_grid_params(self, search_type="random"):
"""Returns the grid parameters of the emulator."""

param_grid_random = {
param_space_random = {
"n_estimators": randint(50, 500),
"min_samples_split": randint(2, 20),
"min_samples_leaf": randint(1, 10),
Expand All @@ -107,7 +107,7 @@ def get_grid_params(self, search_type="random"):
"max_samples": [None, 0.5, 0.75],
}

param_grid_bayes = {
param_space_bayes = {
"n_estimators": Integer(50, 500),
"min_samples_split": Integer(2, 20),
"min_samples_leaf": Integer(1, 10),
Expand All @@ -119,11 +119,11 @@ def get_grid_params(self, search_type="random"):
}

if search_type == "random":
param_grid = param_grid_random
param_space = param_space_random
elif search_type == "bayes":
param_grid = param_grid_bayes
param_space = param_space_bayes

return param_grid
return param_space

def _more_tags(self):
return {"multioutput": True}
Expand Down
12 changes: 6 additions & 6 deletions autoemulate/emulators/rbf.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,13 +85,13 @@ def predict(self, X):

def get_grid_params(self, search_type="random"):
"""Returns the grid parameters of the emulator."""
# param_grid_random = {
# param_space_random = {
# #"smoothing": uniform(0.0, 1.0),
# "kernel": ["linear", "thin_plate_spline", "cubic", "quintic", "multiquadric", "inverse_multiquadric", "gaussian"],
# #"epsilon": uniform(0.0, 1.0),
# "degree": randint(0, 5),
# }
param_grid_random = [
param_space_random = [
{
"kernel": ["linear", "multiquadric"],
"degree": randint(0, 3), # Degrees valid for these kernels
Expand All @@ -114,7 +114,7 @@ def get_grid_params(self, search_type="random"):
},
]

param_grid_bayes = [
param_space_bayes = [
{
"kernel": Categorical(["linear", "multiquadric"]),
"degree": Integer(0, 4), # Degrees valid for these kernels
Expand All @@ -138,11 +138,11 @@ def get_grid_params(self, search_type="random"):
]

if search_type == "random":
param_grid = param_grid_random
param_space = param_space_random
elif search_type == "bayes":
param_grid = param_grid_bayes
param_space = param_space_bayes

return param_grid
return param_space

def _more_tags(self):
return {"multioutput": True}
10 changes: 5 additions & 5 deletions autoemulate/emulators/support_vector_machines.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ def predict(self, X):

def get_grid_params(self, search_type="random"):
"""Returns the grid paramaters for the emulator."""
param_grid_random = {
param_space_random = {
"kernel": ["rbf", "linear", "poly", "sigmoid"],
"degree": randint(2, 6),
"gamma": ["scale", "auto"],
Expand All @@ -139,7 +139,7 @@ def get_grid_params(self, search_type="random"):
"max_iter": [-1],
}

param_grid_bayes = {
param_space_bayes = {
"kernel": Categorical(["rbf", "linear", "poly", "sigmoid"]),
"degree": Integer(2, 5),
"gamma": Categorical(["scale", "auto"]),
Expand All @@ -154,11 +154,11 @@ def get_grid_params(self, search_type="random"):
}

if search_type == "random":
param_grid = param_grid_random
param_space = param_space_random
elif search_type == "bayes":
param_grid = param_grid_bayes
param_space = param_space_bayes

return param_grid
return param_space

def _more_tags(self):
return {"multioutput": False}
10 changes: 5 additions & 5 deletions autoemulate/emulators/xgboost.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,7 @@ def predict(self, X):

def get_grid_params(self, search_type="random"):
"""Returns the grid parameters of the emulator."""
param_grid_random = {
param_space_random = {
"booster": ["gbtree", "dart"],
"n_estimators": randint(100, 1000),
"max_depth": randint(3, 10),
Expand All @@ -148,7 +148,7 @@ def get_grid_params(self, search_type="random"):
"reg_lambda": loguniform(0.01, 1),
}

param_grid_bayes = {
param_space_bayes = {
"booster": Categorical(["gbtree", "dart"]),
"n_estimators": Integer(100, 1000),
"max_depth": Integer(3, 10),
Expand All @@ -165,11 +165,11 @@ def get_grid_params(self, search_type="random"):
}

if search_type == "random":
param_grid = param_grid_random
param_space = param_space_random
elif search_type == "bayes":
param_grid = param_grid_bayes
param_space = param_space_bayes

return param_grid
return param_space

def _more_tags(self):
return {"multioutput": True}
Loading

0 comments on commit d6adb8b

Please sign in to comment.