diff --git a/examples/main.py b/examples/main.py index f332cff..f1abc13 100644 --- a/examples/main.py +++ b/examples/main.py @@ -20,7 +20,7 @@ f = synthetic_functions.StyblinskiTang.for_n_dimensions(3, seed=seed) cs = f.config_space -selected_hyperparameter = cs.get_hyperparameter("x1") +selected_hyperparameter = cs["x1"] # Sampler sampler = BayesianOptimizationSampler(f, cs, initial_points=f.ndim * 4, seed=seed) diff --git a/examples/main_meta_pdp.py b/examples/main_meta_pdp.py index 9a5af36..cb6fd09 100644 --- a/examples/main_meta_pdp.py +++ b/examples/main_meta_pdp.py @@ -85,7 +85,7 @@ def optimize_mc(): f = blackbox_function # Optimize - n_dim = len(cs.get_hyperparameters()) + n_dim = len(list(cs.values())) sampler = BayesianOptimizationSampler( f, cs, diff --git a/examples/main_paper.py b/examples/main_paper.py index 8dfdfbc..54fa1eb 100644 --- a/examples/main_paper.py +++ b/examples/main_paper.py @@ -10,7 +10,7 @@ from tqdm import tqdm from pyPDP.algorithms.partitioner.decision_tree_partitioner import DecisionTreePartitioner -from pyPDP.algorithms.ice import ICE, ICECurve +from pyPDP.algorithms.ice import ICE from pyPDP.algorithms.pdp import PDP from pyPDP.blackbox_functions import BlackboxFunction, BlackboxFunctionND from pyPDP.blackbox_functions.synthetic_functions import StyblinskiTang @@ -34,6 +34,7 @@ data_folder = Path(__file__).parent.parent / 'data' data_folder.mkdir(parents=True, exist_ok=True) + def figure_1_3(f: BlackboxFunction = StyblinskiTang.for_n_dimensions(2, seed=seed), samplers: Dict[str, Sampler] = None, sampled_points=50): diff --git a/examples/main_presentation.py b/examples/main_presentation.py index 358ee6a..3a60d30 100644 --- a/examples/main_presentation.py +++ b/examples/main_presentation.py @@ -9,7 +9,7 @@ from pyPDP.algorithms.ice import ICE from pyPDP.algorithms.partitioner.decision_tree_partitioner import DecisionTreePartitioner from pyPDP.algorithms.pdp import PDP -from pyPDP.blackbox_functions import BlackboxFunctionND, BlackboxFunction +from pyPDP.blackbox_functions import BlackboxFunction from pyPDP.blackbox_functions.synthetic_functions import StyblinskiTang from pyPDP.sampler.acquisition_function import LowerConfidenceBound from pyPDP.sampler.bayesian_optimization import BayesianOptimizationSampler diff --git a/examples/main_random_forest.py b/examples/main_random_forest.py index e29bc1a..f17b9e9 100644 --- a/examples/main_random_forest.py +++ b/examples/main_random_forest.py @@ -105,7 +105,7 @@ def plot_tree_data(log_filename: str, img_filename: str): # plt.boxplot(delta_mcs, positions=[0], manage_ticks=False) # plt.plot([], [], color='orange', label='Decision Tree') plt.plot(x, dt_mean, '*', color='red', label='Decision Tree Mean') - plt.plot(x, dt_mean + dt_std, '*', color='orange', label=f'Decision Tree $\mu\pm$ $\sigma$') + plt.plot(x, dt_mean + dt_std, '*', color='orange', label=f'Decision Tree $\\mu\\pm$ $\\sigma$') plt.plot(x, dt_mean - dt_std, '*', color='orange') # rf mean curve diff --git a/examples/main_sampler_analysis.py b/examples/main_sampler_analysis.py index 3c8d9c5..1b91d71 100644 --- a/examples/main_sampler_analysis.py +++ b/examples/main_sampler_analysis.py @@ -149,7 +149,7 @@ def plot_sampling_bias( )[0] ax_variances.plot(x, np.sqrt(mean_pdp.y_variances)) # Set titles - ax_pdp.set_title(f"{name}\n(mmd={np.mean(arr_mmd):.2f}$\pm${np.std(arr_mmd):.2f})") + ax_pdp.set_title(f"{name}\n(mmd={np.mean(arr_mmd):.2f}$\\pm${np.std(arr_mmd):.2f})") ax_variances.set_ylabel("Std") # fig1.savefig("Figure 1.png") diff --git 
a/pyPDP/__init__.py b/pyPDP/__init__.py index 5ff65bb..88fc2b0 100644 --- a/pyPDP/__init__.py +++ b/pyPDP/__init__.py @@ -1,8 +1,10 @@ name = "pyPDPPartitioner" package_name = "pyPDP" author = "Yannik Mahlau and Dominik Woiwode" -author_email = "dominik.woiwode@stud.uni-hannover.de" -description = "A python implementation of 'Explaining Hyperparameter Optimization via Partial Dependence Plots' by Moosbauer et al." -version = "0.1.8" +author_email = "woiwode@tnt.uni-hannover.de" +description = ("A python implementation of " + "'Explaining Hyperparameter Optimization via Partial Dependence Plots'" + " by Moosbauer et al.") +version = "0.1.9" license = "MIT" url = "https://github.com/dwoiwode/py-pdp-partitioner" diff --git a/pyPDP/algorithms/__init__.py b/pyPDP/algorithms/__init__.py index 6bd6dfe..b270e4e 100644 --- a/pyPDP/algorithms/__init__.py +++ b/pyPDP/algorithms/__init__.py @@ -51,4 +51,4 @@ def n_selected_hyperparameter(self) -> int: @property def num_features(self) -> int: - return len(self.config_space.get_hyperparameters()) + return len(list(self.config_space.values())) diff --git a/pyPDP/algorithms/ice.py b/pyPDP/algorithms/ice.py index 2ca2433..c2e3c00 100644 --- a/pyPDP/algorithms/ice.py +++ b/pyPDP/algorithms/ice.py @@ -1,6 +1,6 @@ from dataclasses import dataclass from functools import cached_property -from typing import Optional, List +from typing import Optional import ConfigSpace as CS import ConfigSpace.hyperparameters as CSH @@ -142,7 +142,7 @@ def implied_config_space(self) -> CS.ConfigurationSpace: min_values = unscale(np.min(self.x_ice, axis=0), self.full_config_space) max_values = unscale(np.max(self.x_ice, axis=0), self.full_config_space) cs = CS.ConfigurationSpace() - for hp, min_, max_ in zip(self.full_config_space.get_hyperparameters(), min_values, max_values): + for hp, min_, max_ in zip(list(self.full_config_space.values()), min_values, max_values): assert isinstance(hp, CSH.NumericalHyperparameter) if min_ == max_: hp_copy = CSH.Constant(hp.name, value=min_) @@ -202,7 +202,7 @@ def _calculate(self): # Retrieve hp index from cs cs = self.config_space idx = get_selected_idx(self.selected_hyperparameter, cs) - num_features = len(cs.get_hyperparameters()) + num_features = len(list(cs.values())) # retrieve x-values from config x_s = self.grid_points diff --git a/pyPDP/algorithms/partitioner/__init__.py b/pyPDP/algorithms/partitioner/__init__.py index 514793d..e28f0a7 100644 --- a/pyPDP/algorithms/partitioner/__init__.py +++ b/pyPDP/algorithms/partitioner/__init__.py @@ -1,6 +1,6 @@ from abc import ABC, abstractmethod from functools import cached_property -from typing import Tuple, Optional, List, Callable +from typing import Tuple, Optional, List import ConfigSpace as CS import numpy as np @@ -68,7 +68,7 @@ def negative_log_likelihood(self, true_function: BlackboxFunction) -> float: selected_hyperparameter_names = {hp.name for hp in self.selected_hyperparameter} not_selected_hp = [ hp - for hp in true_function.config_space.get_hyperparameters() + for hp in list(true_function.config_space.values()) if hp.name not in selected_hyperparameter_names ] @@ -127,6 +127,7 @@ def plot_confidences(self, confidence_max_sigma=confidence_max_sigma, ax=ax) + class Partitioner(Algorithm, ABC): def __init__(self, surrogate_model: SurrogateModel, selected_hyperparameter: SelectedHyperparameterType, @@ -159,7 +160,7 @@ def __init__(self, surrogate_model: SurrogateModel, selected_hyperparameter_names = {hp.name for hp in self.selected_hyperparameter} selected_hyperparameter_names = 
selected_hyperparameter_names.union({hp.name for hp in self.not_splittable_hp}) self.possible_split_parameters: List[CSH.Hyperparameter] = [ - hp for hp in cs.get_hyperparameters() + hp for hp in list(cs.values()) if hp.name not in selected_hyperparameter_names ] @@ -175,5 +176,5 @@ def ice(self) -> ICE: return self._ice @abstractmethod - def partition(self, max_depth: int = 1): # -> List[Region]: + def partition(self, max_depth: int = 1): # -> List[Region]: pass diff --git a/pyPDP/algorithms/partitioner/decision_tree_partitioner.py b/pyPDP/algorithms/partitioner/decision_tree_partitioner.py index bdcc84c..b87e282 100644 --- a/pyPDP/algorithms/partitioner/decision_tree_partitioner.py +++ b/pyPDP/algorithms/partitioner/decision_tree_partitioner.py @@ -10,7 +10,7 @@ from pyPDP.surrogate_models import SurrogateModel from pyPDP.utils.plotting import get_ax, check_and_set_axis, get_random_color, plot_config_space from pyPDP.utils.typing import SelectedHyperparameterType, ColorType -from pyPDP.utils.utils import scale_float, unscale_float, unscale, ConfigSpaceHolder, get_hyperparameters +from pyPDP.utils.utils import scale_float, unscale_float, unscale, ConfigSpaceHolder class SplitCondition(ConfigSpaceHolder): @@ -97,7 +97,7 @@ def __contains__(self, item: CS.Configuration) -> bool: def implied_config_space(self, seed: Optional[int] = None) -> CS.ConfigurationSpace: # copy cs hp_dic = {} - for hp in self.config_space.get_hyperparameters(): + for hp in list(self.config_space.values()): if isinstance(hp, CSH.NumericalHyperparameter): new_hp = CSH.UniformFloatHyperparameter(hp.name, lower=hp.lower, upper=hp.upper, log=hp.log) hp_dic[hp.name] = new_hp @@ -331,6 +331,6 @@ def plot_incumbent_cs(self, ax = get_ax(ax) region = self.get_incumbent_region(incumbent) new_cs = region.implied_config_space() - all_hp = new_cs.get_hyperparameters() + all_hp = list(new_cs.values()) not_selected_hp = sorted(list(set(all_hp) - set(self.selected_hyperparameter)), key=lambda hp: hp.name) plot_config_space(new_cs, x_hyperparameters=not_selected_hp, color=color, alpha=alpha, ax=ax) diff --git a/pyPDP/algorithms/partitioner/random_forest_partitioner.py b/pyPDP/algorithms/partitioner/random_forest_partitioner.py index eeafd4a..9598ba6 100644 --- a/pyPDP/algorithms/partitioner/random_forest_partitioner.py +++ b/pyPDP/algorithms/partitioner/random_forest_partitioner.py @@ -88,7 +88,11 @@ def partition(self, not_splittable_hp = list(set(self.possible_split_parameters) - set(splittable_hp)) # create dt - dt = DecisionTreePartitioner.from_ICE(subset_ice, min_points_per_node=1, not_splittable_hp=not_splittable_hp) + dt = DecisionTreePartitioner.from_ICE( + subset_ice, + min_points_per_node=1, + not_splittable_hp=not_splittable_hp + ) dt.partition(max_depth=max_depth) self.trees.append(dt) diff --git a/pyPDP/algorithms/pdp.py b/pyPDP/algorithms/pdp.py index e4a9ff2..6e5029b 100644 --- a/pyPDP/algorithms/pdp.py +++ b/pyPDP/algorithms/pdp.py @@ -1,7 +1,6 @@ from functools import cached_property -from typing import Iterable, Optional +from typing import Optional -import ConfigSpace.hyperparameters as CSH import numpy as np from matplotlib import pyplot as plt diff --git a/pyPDP/blackbox_functions/__init__.py b/pyPDP/blackbox_functions/__init__.py index 4d54167..20d071d 100644 --- a/pyPDP/blackbox_functions/__init__.py +++ b/pyPDP/blackbox_functions/__init__.py @@ -10,7 +10,7 @@ class BlackboxFunction(ConfigSpaceHolder, ABC): def __init__(self, config_space: CS.ConfigurationSpace): super().__init__(config_space, seed=True) - 
self.ndim = len(self.config_space.get_hyperparameters()) + self.ndim = len(list(self.config_space.values())) self.__name__ = str(self) def __call__(self, **kwargs) -> float: diff --git a/pyPDP/blackbox_functions/synthetic_functions.py b/pyPDP/blackbox_functions/synthetic_functions.py index e17eb41..ee71150 100644 --- a/pyPDP/blackbox_functions/synthetic_functions.py +++ b/pyPDP/blackbox_functions/synthetic_functions.py @@ -1,7 +1,7 @@ """ Collection of blackbox functions that can be minimized """ -from typing import Union, List, Tuple +from typing import Union import ConfigSpace as CS import ConfigSpace.hyperparameters as CSH @@ -116,7 +116,7 @@ class StyblinskiTang(BlackboxFunctionND): """ def value_from_config(self, config: CS.Configuration) -> float: - x = np.asarray([config[hp.name] for hp in self.config_space.get_hyperparameters()]) + x = np.asarray([config[hp.name] for hp in list(self.config_space.values())]) return np.sum(np.power(x, 4) - 16 * np.power(x, 2) + 5 * x) / 2 @@ -124,9 +124,12 @@ def value_from_config(self, config: CS.Configuration) -> float: def _styblinski_tang_integral(x: float) -> float: return 0.5 * (0.2 * np.power(x, 5) - 16 / 3 * np.power(x, 3) + 2.5 * np.power(x, 2)) - def pd_integral(self, *hyperparameters: Union[str, CSH.Hyperparameter], seed=None, - return_offset: bool = False) -> Union[ - CallableBlackboxFunction, tuple[CallableBlackboxFunction, float]]: + def pd_integral( + self, + *hyperparameters: Union[str, CSH.Hyperparameter], + seed=None, + return_offset: bool = False + ) -> Union[CallableBlackboxFunction, tuple[CallableBlackboxFunction, float]]: if len(hyperparameters) == 0: raise ValueError("Requires at least one hyperparameter for pd_integral") @@ -141,7 +144,7 @@ def pd_integral(self, *hyperparameters: Union[str, CSH.Hyperparameter], seed=Non integral_value = self._styblinski_tang_integral(upper) - self._styblinski_tang_integral(lower) integral_offset += integral_value / (upper - lower) - hps = self.config_space.get_hyperparameters() + hps = list(self.config_space.values()) reduced_cs = CS.ConfigurationSpace(seed=seed) hyperparameter_names = {hp.name for hp in hyperparameters} for hp in hps: @@ -158,8 +161,3 @@ def integral(config: CS.Configuration): else: return (CallableBlackboxFunction(integral, reduced_cs, name=f"{self.__name__} d({hyperparameter_names})"), integral_offset) - - - - - diff --git a/pyPDP/sampler/__init__.py b/pyPDP/sampler/__init__.py index 24f073f..ca374bd 100644 --- a/pyPDP/sampler/__init__.py +++ b/pyPDP/sampler/__init__.py @@ -49,7 +49,7 @@ def __del__(self): def _hash(self, *args) -> str: md = hashlib.md5() md.update(bytes(str(self.__class__), encoding="latin")) - md.update(bytes(str(self.config_space.get_hyperparameters()), encoding="latin")) + md.update(bytes(str(list(self.config_space.values())), encoding="latin")) for arg in args: md.update(bytes(str(arg), encoding="latin")) md.update(bytes(str(self.obj_func), encoding="latin")) diff --git a/pyPDP/sampler/acquisition_function.py b/pyPDP/sampler/acquisition_function.py index c53df82..4f59860 100644 --- a/pyPDP/sampler/acquisition_function.py +++ b/pyPDP/sampler/acquisition_function.py @@ -41,7 +41,8 @@ def _get_optimum_uniform_distribution(self) -> Tuple[CS.Configuration, float]: return max(config_value_pairs, key=lambda x: x[1]) - def convert_configs(self, configuration: Union[CS.Configuration, np.ndarray]): + @staticmethod + def convert_configs(configuration: Union[CS.Configuration, np.ndarray]): if isinstance(configuration, CS.Configuration): x = 
np.asarray(configuration.get_array()) x = x.reshape([1, -1]) @@ -87,8 +88,14 @@ def plot(self, ax.plot(x, acquisition_y, color=color_acquisition, label=self.__class__.__name__) if show_optimum: - ax.plot(list(optimum.values())[0], self(optimum), "*", color=color_optimum, label=f"Optimum ({optimum})", - markersize=15) + ax.plot( + list(optimum.values())[0], + self(optimum), + "*", + color=color_optimum, + label=f"Optimum ({optimum})", + markersize=15 + ) elif n_hyperparameters == 2: # 2D idx = get_selected_idx(x_hyperparameters, self.config_space) raise NotImplementedError("2D currently not implemented (#TODO)") @@ -166,6 +173,7 @@ def update(self, eta: float): class LowerConfidenceBound(AcquisitionFunction): """LCB""" + def __init__(self, config_space: CS.ConfigurationSpace, surrogate_model: SurrogateModel, diff --git a/pyPDP/sampler/bayesian_optimization.py b/pyPDP/sampler/bayesian_optimization.py index cd7a0a1..706e13d 100644 --- a/pyPDP/sampler/bayesian_optimization.py +++ b/pyPDP/sampler/bayesian_optimization.py @@ -1,6 +1,6 @@ import hashlib import warnings -from typing import Callable, Any, List, Tuple, Union, Optional +from typing import Callable, Any, List, Tuple, Union, Optional, Type, Dict import ConfigSpace as CS import numpy as np @@ -14,15 +14,17 @@ class BayesianOptimizationSampler(Sampler): - def __init__(self, - obj_func: Callable[[Any], float], - config_space: CS.ConfigurationSpace, - surrogate_model: Optional[SurrogateModel] = None, - initial_points: int = 5, - acq_class=None, - acq_class_kwargs=None, - minimize_objective: bool = True, - seed=None): + def __init__( + self, + obj_func: Callable[[Any], float], + config_space: CS.ConfigurationSpace, + surrogate_model: Optional[SurrogateModel] = None, + initial_points: int = 5, + acq_class: Optional[Type[AcquisitionFunction]] = None, + acq_class_kwargs: Optional[Dict[str, Any]] = None, + minimize_objective: bool = True, + seed=None + ): super().__init__(obj_func, config_space, minimize_objective, seed=seed) # Initialize class self.initial_points = initial_points # number of initial points to be sampled @@ -39,11 +41,13 @@ def __init__(self, acq_class_kwargs = {} if acq_class is None: acq_class = LowerConfidenceBound # Default Lower Confidence Bound - self.acq_func: AcquisitionFunction = acq_class(self.config_space, - self.surrogate_model, - minimize_objective=minimize_objective, - seed=seed, - **acq_class_kwargs) + self.acq_func: AcquisitionFunction = acq_class( + self.config_space, + self.surrogate_model, + minimize_objective=minimize_objective, + seed=seed, + **acq_class_kwargs + ) # Update cache according to additional arguments self.hash = self._hash(seed, acq_class, acq_class_kwargs, initial_points, surrogate_model.__class__) diff --git a/pyPDP/sampler/grid_sampler.py b/pyPDP/sampler/grid_sampler.py index 8d04f2e..f8ad3b5 100644 --- a/pyPDP/sampler/grid_sampler.py +++ b/pyPDP/sampler/grid_sampler.py @@ -31,9 +31,9 @@ def __init__( def _sample(self, n_points: int = 1, pbar: Union[ProgressDummy, tqdm] = ProgressDummy()): expected_length = len(self) + n_points if self._grid is None or len(self) + len(self._grid) < expected_length: - n_dims = len(self.config_space.get_hyperparameters()) + n_dims = len(list(self.config_space.values())) samplers_per_axis = int(np.ceil(expected_length ** (1 / n_dims))) - num_steps_dict = {param.name: samplers_per_axis for param in self.config_space.get_hyperparameters()} + num_steps_dict = {param.name: samplers_per_axis for param in list(self.config_space.values())} self._grid = 
generate_grid(self.config_space, num_steps_dict) self.rng.shuffle(self._grid) diff --git a/pyPDP/surrogate_models/__init__.py b/pyPDP/surrogate_models/__init__.py index 88da34e..d2b3d77 100644 --- a/pyPDP/surrogate_models/__init__.py +++ b/pyPDP/surrogate_models/__init__.py @@ -5,8 +5,8 @@ import numpy as np from matplotlib import pyplot as plt -from pyPDP.utils.plotting import get_ax, check_and_set_axis, plot_1D_confidence_color_gradients, plot_1D_confidence_lines, \ - plot_line +from pyPDP.utils.plotting import get_ax, check_and_set_axis, plot_1D_confidence_color_gradients, \ + plot_1D_confidence_lines, plot_line from pyPDP.utils.typing import ColorType from pyPDP.utils.utils import ConfigSpaceHolder, config_list_to_array, get_uniform_distributed_ranges @@ -16,9 +16,10 @@ def __init__(self, cs: CS.ConfigurationSpace, seed=None): super().__init__(cs, seed=seed) self.num_fitted_points = 0 - def __call__(self, - X: Union[np.ndarray, CS.Configuration, List[CS.Configuration]] - ) -> Union[np.ndarray, float, List[float]]: + def __call__( + self, + X: Union[np.ndarray, CS.Configuration, List[CS.Configuration]] + ) -> Union[np.ndarray, float, List[float]]: # Config or List[Config] or empty list if isinstance(X, CS.Configuration): means, stds = self.predict_config(X) @@ -45,7 +46,11 @@ def predict(self, X: np.ndarray) -> tuple[np.ndarray, np.ndarray]: """ pass - def fit(self, X: Union[List[CS.Configuration], np.ndarray], y: Union[List[float], np.ndarray]): + def fit( + self, + X: Union[List[CS.Configuration], np.ndarray], + y: Union[List[float], np.ndarray] + ): X = config_list_to_array(X) self.num_fitted_points = len(y) return self._fit(X, np.asarray(y)) @@ -54,8 +59,10 @@ def fit(self, X: Union[List[CS.Configuration], np.ndarray], y: Union[List[float] def _fit(self, X: np.ndarray, y: np.ndarray): pass - def predict_configs(self, - configs: List[CS.Configuration]) -> Tuple[List[float], List[float]]: + def predict_configs( + self, + configs: List[CS.Configuration] + ) -> Tuple[List[float], List[float]]: """ If configs is a single config: Return a single mean, std. 
If configs is a list of configs: Return a tuple with list of means and list of stds @@ -73,6 +80,20 @@ def predict_config(self, config: CS.Configuration) -> Tuple[float, float]: assert isinstance(std[0], float) return mean[0], std[0] + def get_incumbent( + self, + n_samples_for_optimization: int = 1000, + *, + minimize: bool = True + ) -> Tuple[CS.Configuration, float]: + configs = self.config_space.sample_configuration(n_samples_for_optimization) + values = self(configs) + config_value_pairs = [(config, value) for config, value in zip(configs, values)] + + if minimize: + return min(config_value_pairs, key=lambda x: x[1]) + return max(config_value_pairs, key=lambda x: x[1]) + def plot_means( self, color: ColorType = "blue", @@ -81,7 +102,7 @@ def plot_means( ): ax = get_ax(ax) - hyperparameters = self.config_space.get_hyperparameters() + hyperparameters = list(self.config_space.values()) n_hyperparameters = len(hyperparameters) assert n_hyperparameters < 3, 'Surrogate model only supports plotting less than 3 feature dimensions' @@ -95,7 +116,7 @@ def plot_means( name = self.__class__.__name__ x = ranges[0] - plot_line(x, mu, color=color, label=f"{name}-$\mu$", ax=ax) + plot_line(x, mu, color=color, label=f"{name}-$\\mu$", ax=ax) elif n_hyperparameters == 2: # 2D x = ranges[0] y = ranges[1] @@ -116,7 +137,7 @@ def plot_confidences( ): ax = get_ax(ax) - hyperparameters = self.config_space.get_hyperparameters() + hyperparameters = list(self.config_space.values()) n_hyperparameters = len(hyperparameters) assert n_hyperparameters < 3, 'Surrogate model only supports plotting less than 3 feature dimensions' diff --git a/pyPDP/surrogate_models/sklearn_surrogates.py b/pyPDP/surrogate_models/sklearn_surrogates.py index 2f733b3..7a313f6 100644 --- a/pyPDP/surrogate_models/sklearn_surrogates.py +++ b/pyPDP/surrogate_models/sklearn_surrogates.py @@ -26,6 +26,7 @@ def predict(self, X: np.ndarray) -> tuple[np.ndarray, np.ndarray]: return self.pipeline.predict(X, return_std=True) + class GaussianProcessSurrogate(SkLearnPipelineSurrogate): def __init__(self, cs: CS.ConfigurationSpace, kernel=Matern(nu=1.5), seed=None): pipeline = Pipeline([ diff --git a/pyPDP/utils/advanced_plots.py b/pyPDP/utils/advanced_plots.py index a94c3e3..9fefebc 100644 --- a/pyPDP/utils/advanced_plots.py +++ b/pyPDP/utils/advanced_plots.py @@ -88,7 +88,7 @@ def plot_hyperparameter_array_1D( seed=0 ) -> plt.Figure: cs = surrogate_model.config_space - hyperparameters = cs.get_hyperparameters() + hyperparameters = list(cs.values()) w, h = as_quadratic_shape_as_possible_for_n_figures(len(hyperparameters)) fig, axs = plt.subplots(h, w, sharey="all", figsize=(w * fig_res, h * fig_res)) @@ -114,7 +114,7 @@ def plot_hyperparameter_array_2D( seed=0 ) -> Tuple[plt.Figure, plt.Figure]: cs = surrogate_model.config_space - hyperparameters = cs.get_hyperparameters() + hyperparameters = list(cs.values()) n = len(hyperparameters) fig_mean, axs_mean = plt.subplots(n, n, figsize=(n * fig_res, n * fig_res)) fig_std, axs_std = plt.subplots(n, n, figsize=(n * fig_res, n * fig_res)) diff --git a/pyPDP/utils/plotting.py b/pyPDP/utils/plotting.py index 415b7e2..be77f84 100644 --- a/pyPDP/utils/plotting.py +++ b/pyPDP/utils/plotting.py @@ -133,7 +133,7 @@ def plot_function(f: Callable[[Any], float], ax = get_ax(ax) color = get_color(color) - hps = cs.get_hyperparameters() + hps = list(cs.values()) constants = {hp.name: hp.value for hp in hps if isinstance(hp, CSH.Constant)} parameters = [hp for hp in hps if not isinstance(hp, CSH.Constant)] n_parameter = 
len(parameters) @@ -216,16 +216,16 @@ def plot_1D_confidence_lines( # Handle zero first if 0 in k_sigmas: - ax.plot(x, means, color=color, alpha=0.3, label=f"{name}-$\mu$") + ax.plot(x, means, color=color, alpha=0.3, label=f"{name}-$\\mu$") for k_sigma in sorted(k_sigmas): # Sort for order in labels if k_sigma == 0: continue # If int omit decimal points, if float: print 2 decimal points if isinstance(k_sigma, int): - label = f"{name}-$\mu\pm${k_sigma}$\sigma$" + label = f"{name}-$\\mu\\pm${k_sigma}$\\sigma$" else: - label = f"{name}-$\mu\pm${k_sigma:.2f}$\sigma$" + label = f"{name}-$\\mu\\pm${k_sigma:.2f}$\\sigma$" ax.plot(x, means - k_sigma * stds, color=color, alpha=1 / k_sigma * 0.2, label=label) ax.plot(x, means + k_sigma * stds, color=color, alpha=1 / k_sigma * 0.2) diff --git a/pyPDP/utils/typing.py b/pyPDP/utils/typing.py index ad7165b..932b890 100644 --- a/pyPDP/utils/typing.py +++ b/pyPDP/utils/typing.py @@ -2,4 +2,4 @@ import ConfigSpace.hyperparameters as CSH ColorType = Union[str, Tuple[float, float, float]] -SelectedHyperparameterType = Union[CSH.Hyperparameter, str, Iterable[Union[str, CSH.Hyperparameter]]] \ No newline at end of file +SelectedHyperparameterType = Union[CSH.Hyperparameter, str, Iterable[Union[str, CSH.Hyperparameter]]] diff --git a/pyPDP/utils/utils.py b/pyPDP/utils/utils.py index 4e62f14..69d66b6 100644 --- a/pyPDP/utils/utils.py +++ b/pyPDP/utils/utils.py @@ -3,6 +3,7 @@ import math import time from abc import ABC +from copy import deepcopy from typing import List, Iterable, Union, Optional import ConfigSpace as CS @@ -59,7 +60,7 @@ def scale_float( cs: CS.ConfigurationSpace, hp: CSH.NumericalHyperparameter ) -> float: - cs_hp = cs.get_hyperparameter(hp.name) + cs_hp = cs[hp.name] if cs_hp.log: log_lower = np.log(cs_hp.lower) log_upper = np.log(cs_hp.upper) @@ -77,7 +78,7 @@ def unscale_float( cs: CS.ConfigurationSpace, hp: CSH.Hyperparameter ) -> float: - cs_hp = cs.get_hyperparameter(hp.name) + cs_hp = cs[hp.name] if cs_hp.log: log_lower = np.log(cs_hp.lower) log_upper = np.log(cs_hp.upper) @@ -95,7 +96,7 @@ def unscale(x: np.ndarray, cs: CS.ConfigurationSpace) -> np.ndarray: """ x_copy = x.copy() num_dims = len(x.shape) - for i, hp in enumerate(cs.get_hyperparameters()): + for i, hp in enumerate(list(cs.values())): if isinstance(hp, CSH.NumericalHyperparameter): if hp.log: unscaler = lambda values: \ @@ -134,19 +135,19 @@ def get_hyperparameters(hyperparameters: Optional[SelectedHyperparameterType], cs: CS.ConfigurationSpace) -> List[CSH.Hyperparameter]: if hyperparameters is None: # None -> All hyperparameters in cs - return list(cs.get_hyperparameters()) + return list(list(cs.values())) elif isinstance(hyperparameters, CSH.Hyperparameter): # Single Hyperparameter return [hyperparameters] elif isinstance(hyperparameters, str): # Single Hyperparameter name - return [cs.get_hyperparameter(hyperparameters)] + return [cs[hyperparameters]] else: # Either list of names or list of Hyperparameters hps = [] for hp in hyperparameters: if isinstance(hp, str): - hps.append(cs.get_hyperparameter(hp)) + hps.append(cs[hp]) elif isinstance(hp, CSH.Hyperparameter): hps.append(hp) else: @@ -167,7 +168,7 @@ def get_uniform_distributed_ranges( """ ranges = [] if isinstance(cs, CS.ConfigurationSpace): - cs = cs.get_hyperparameters() + cs = list(cs.values()) for parameter in cs: assert isinstance(parameter, CSH.NumericalHyperparameter) if scaled: @@ -213,7 +214,7 @@ def convert_hyperparameters( hps = [] for hp in hyperparameters: if isinstance(hp, str): - 
hps.append(config_space.get_hyperparameter(hp)) + hps.append(config_space[hp]) elif isinstance(hp, CSH.Hyperparameter): hps.append(hp) else: @@ -222,23 +223,8 @@ def convert_hyperparameters( def copy_config_space(cs: CS.ConfigurationSpace, *, seed=None) -> CS.ConfigurationSpace: - # copy cs - hp_dic = {} - for hp in cs.get_hyperparameters(): - if isinstance(hp, CSH.NumericalHyperparameter): - new_hp = hp.__class__(hp.name, lower=hp.lower, upper=hp.upper, log=hp.log) - hp_dic[hp.name] = new_hp - elif isinstance(hp, CSH.CategoricalHyperparameter): - new_hp = hp.__class__(hp.name, choices=hp.choices[:]) # Copy choices - hp_dic[hp.name] = new_hp # TODO: Test copy categorical hp and unscaler - else: - raise TypeError(f"Currently not support hyperparameter-type {type(hp)}") - - # add new hp to new cs - cs_copy = CS.ConfigurationSpace(seed=seed) - for hp in hp_dic.values(): - cs_copy.add_hyperparameter(hp) - + cs_copy = deepcopy(cs) + cs_copy.seed(seed) return cs_copy diff --git a/setup.py b/setup.py index a64d2ca..669546c 100644 --- a/setup.py +++ b/setup.py @@ -20,7 +20,7 @@ ], "examples": [ "pandas>=1.3.5", - "openml>=0.12.2", + "openml==0.12.2", # "HPOBench @ git+https://github.com/automl/HPOBench.git", ] } diff --git a/tests/algorithms/partitioner/test_DTNode.py b/tests/algorithms/partitioner/test_DTNode.py index cd29690..194f320 100644 --- a/tests/algorithms/partitioner/test_DTNode.py +++ b/tests/algorithms/partitioner/test_DTNode.py @@ -2,7 +2,6 @@ from pyPDP.algorithms.ice import ICE from pyPDP.algorithms.partitioner.decision_tree_partitioner import DecisionTreePartitioner -from pyPDP.blackbox_functions import synthetic_functions, config_space_nd from pyPDP.blackbox_functions.synthetic_functions import Square from pyPDP.sampler.acquisition_function import LowerConfidenceBound from pyPDP.sampler.bayesian_optimization import BayesianOptimizationSampler @@ -17,7 +16,7 @@ def setUp(self) -> None: # Static paper configurations (not changed throughout the paper) self.cs = f.config_space - self.selected_hyperparameter = self.cs.get_hyperparameter("x1") + self.selected_hyperparameter = self.cs["x1"] self.surrogate_model = GaussianProcessSurrogate(self.cs) bo = BayesianOptimizationSampler(f, self.cs, @@ -50,8 +49,8 @@ def test_implied_cs(self): root_cs = root.implied_config_space(seed=0) # root cs should be the same as the original - for hp in root_cs.get_hyperparameters(): - original_hp = self.cs.get_hyperparameter(hp.name) + for hp in list(root_cs.values()): + original_hp = self.cs[hp.name] self.assertEqual(hp.upper, original_hp.upper) self.assertEqual(hp.lower, original_hp.lower) @@ -59,8 +58,8 @@ def test_implied_cs(self): for leaf in self.partitioner.leaves: leaf_cs = leaf.implied_config_space(seed=0) is_different = False - for hp in leaf_cs.get_hyperparameters(): - original_hp = self.cs.get_hyperparameter(hp.name) + for hp in list(leaf_cs.values()): + original_hp = self.cs[hp.name] if original_hp.lower != hp.lower or original_hp.upper != hp.upper: is_different = True self.assertTrue(is_different) diff --git a/tests/algorithms/partitioner/test_neg_log_likelihood.py b/tests/algorithms/partitioner/test_neg_log_likelihood.py index bd07ebd..9810aee 100644 --- a/tests/algorithms/partitioner/test_neg_log_likelihood.py +++ b/tests/algorithms/partitioner/test_neg_log_likelihood.py @@ -11,7 +11,7 @@ def test_nll(self): cs = f.config_space n = 80 tau = 1 - selected_hyperparameter = cs.get_hyperparameter("x1") + selected_hyperparameter = cs["x1"] # Bayesian random_sampler = 
BayesianOptimizationSampler(f, cs, @@ -19,7 +19,10 @@ def test_nll(self): acq_class_kwargs={"tau": tau}) random_sampler.sample(n) - dt_partitioner = DecisionTreePartitioner.from_random_points(random_sampler.surrogate_model, selected_hyperparameter) + dt_partitioner = DecisionTreePartitioner.from_random_points( + random_sampler.surrogate_model, + selected_hyperparameter + ) leaf_list = dt_partitioner.partition(max_depth=1) best_region = dt_partitioner.get_incumbent_region(random_sampler.incumbent[0]) diff --git a/tests/algorithms/partitioner/test_partitioner.py b/tests/algorithms/partitioner/test_partitioner.py index 5e61b37..5911078 100644 --- a/tests/algorithms/partitioner/test_partitioner.py +++ b/tests/algorithms/partitioner/test_partitioner.py @@ -15,7 +15,7 @@ def test_dt_partitioner_single_split(self): self.initialize_figure() f = Square.for_n_dimensions(2) cs = f.config_space - selected_hp = cs.get_hyperparameter("x1") + selected_hp = cs["x1"] bo = BayesianOptimizationSampler(f, config_space=cs) bo.sample(10) diff --git a/tests/algorithms/partitioner/test_rf_partitioner.py b/tests/algorithms/partitioner/test_rf_partitioner.py index 325df5b..6721b38 100644 --- a/tests/algorithms/partitioner/test_rf_partitioner.py +++ b/tests/algorithms/partitioner/test_rf_partitioner.py @@ -11,7 +11,7 @@ def test_rf_simple(self): self.initialize_figure() f = Square.for_n_dimensions(2) cs = f.config_space - selected_hp = cs.get_hyperparameter("x1") + selected_hp = cs["x1"] bo = BayesianOptimizationSampler(f, config_space=cs) bo.sample(50) @@ -33,7 +33,7 @@ def test_rf_3D_single_split(self): self.initialize_figure() f = StyblinskiTang.for_n_dimensions(3, seed=32) cs = f.config_space - selected_hp = cs.get_hyperparameter("x1") + selected_hp = cs["x1"] bo = BayesianOptimizationSampler(f, config_space=cs) bo.sample(80) @@ -55,7 +55,7 @@ def test_rf_3D_two_splits(self): self.initialize_figure() f = StyblinskiTang.for_n_dimensions(3, seed=32) cs = f.config_space - selected_hp = cs.get_hyperparameter("x1") + selected_hp = cs["x1"] bo = BayesianOptimizationSampler(f, config_space=cs) bo.sample(80) diff --git a/tests/algorithms/partitioner/test_split_condition.py b/tests/algorithms/partitioner/test_split_condition.py index 6b66bd1..4852eed 100644 --- a/tests/algorithms/partitioner/test_split_condition.py +++ b/tests/algorithms/partitioner/test_split_condition.py @@ -9,7 +9,7 @@ class TestSplitCondition(TestCase): def test_float_value(self): cs = config_space_nd(1, upper=5, lower=-5) - hyperparameter = cs.get_hyperparameter('x1') + hyperparameter = cs["x1"] value = 3 cond = SplitCondition(cs, hyperparameter, value=value, less_equal=True) @@ -29,7 +29,7 @@ def test_float_value(self): def test_normalized_value(self): cs = config_space_nd(1, upper=5, lower=-5) - hyperparameter = cs.get_hyperparameter('x1') + hyperparameter = cs["x1"] normalized_value = 0.4 cond = SplitCondition(cs, hyperparameter, normalized_value=normalized_value, less_equal=False) diff --git a/tests/algorithms/test_ice.py b/tests/algorithms/test_ice.py index db45a41..2eb936f 100644 --- a/tests/algorithms/test_ice.py +++ b/tests/algorithms/test_ice.py @@ -13,7 +13,7 @@ def test_create_ice_1D_f_1D(self): f = Square.for_n_dimensions(1) cs = f.config_space bo = BayesianOptimizationSampler(f, config_space=cs) - selected_hp = cs.get_hyperparameter("x1") + selected_hp = cs["x1"] bo.sample(10) num_grid_points = 1000 @@ -47,7 +47,7 @@ def test_create_ice_1D_f_2D(self): f = Square.for_n_dimensions(2) cs = f.config_space bo = BayesianOptimizationSampler(f, 
config_space=cs) - selected_hyperparameter = cs.get_hyperparameter("x1") + selected_hyperparameter = cs["x1"] bo.sample(10) num_grid_points = 20 @@ -76,7 +76,7 @@ def test_create_ice_centered(self): f = Square.for_n_dimensions(2) cs = f.config_space bo = BayesianOptimizationSampler(f, config_space=cs) - selected_hp = cs.get_hyperparameter("x1") + selected_hp = cs["x1"] bo.sample(10) ice = ICE.from_random_points(bo.surrogate_model, selected_hp) @@ -89,14 +89,14 @@ def test_ice_curve_configspace(self): f = StyblinskiTang.for_n_dimensions(2) cs = f.config_space bo = BayesianOptimizationSampler(f, config_space=cs) - selected_hp = cs.get_hyperparameter("x1") + selected_hp = cs["x1"] bo.sample(10) ice = ICE.from_random_points(bo.surrogate_model, selected_hp) ice_curve = ice[0] reduced_cs = ice_curve.implied_config_space - x1 = reduced_cs.get_hyperparameter("x1") - x2 = reduced_cs.get_hyperparameter("x2") + x1 = reduced_cs["x1"] + x2 = reduced_cs["x2"] self.assertEqual(selected_hp.lower, x1.lower) self.assertEqual(selected_hp.upper, x1.upper) diff --git a/tests/algorithms/test_pdp.py b/tests/algorithms/test_pdp.py index 7065e4e..ab3aecb 100644 --- a/tests/algorithms/test_pdp.py +++ b/tests/algorithms/test_pdp.py @@ -13,7 +13,7 @@ class TestPDP(PlottableTest): def test_calculate_pdp_1D(self): f = Square.for_n_dimensions(1) cs = f.config_space - selected_hp = cs.get_hyperparameter("x1") + selected_hp = cs["x1"] bo = BayesianOptimizationSampler(f, config_space=cs) bo.sample(10) @@ -26,7 +26,7 @@ def test_calculate_pdp_1D(self): def test_create_pdp_2D(self): f = Square.for_n_dimensions(2) cs = f.config_space - selected_hp = cs.get_hyperparameter("x1") + selected_hp = cs["x1"] bo = BayesianOptimizationSampler(f, config_space=cs) bo.sample(10) @@ -42,7 +42,7 @@ def test_create_pdp_centered(self): cs = f.config_space bo = BayesianOptimizationSampler(f, config_space=cs) - selected_hp = cs.get_hyperparameter("x1") + selected_hp = cs["x1"] bo.sample(10) pdp = PDP.from_random_points(bo.surrogate_model, selected_hp) diff --git a/tests/blackbox_functions/test_hpo_bench.py b/tests/blackbox_functions/test_hpo_bench.py index 5fbe070..74b2523 100644 --- a/tests/blackbox_functions/test_hpo_bench.py +++ b/tests/blackbox_functions/test_hpo_bench.py @@ -1,3 +1,5 @@ +import unittest + from matplotlib import pyplot as plt from pyPDP.algorithms.ice import ICE @@ -9,6 +11,7 @@ class TestHPOBench(PlottableTest): + @unittest.skip("HPOBench requirement versions breaks stuff...") def test_svm_task_2079(self): """ Took ~3 min for me (dwoiwode) @@ -29,7 +32,7 @@ def test_svm_task_2079(self): # Surrogate model surrogate_model = GaussianProcessSurrogate(cs, seed=seed) surrogate_model.fit(sampler.X, sampler.y) - for selected_hyperparameter in cs.get_hyperparameters(): + for selected_hyperparameter in list(cs.values()): self.initialize_figure() self.fig.suptitle(f"HPOBench SVM Task 2079 - {selected_hyperparameter.name} - {len(sampler)} samples") @@ -50,6 +53,7 @@ def test_svm_task_2079(self): self.save_fig() plt.show() + @unittest.skip("HPOBench requirement versions breaks stuff...") def test_rf_task_2079(self): seed = 0 cs, f = get_RFBenchmarkMF(2079, seed=seed) @@ -64,7 +68,7 @@ def test_rf_task_2079(self): # Surrogate model surrogate_model = GaussianProcessSurrogate(cs, seed=seed) surrogate_model.fit(sampler.X, sampler.y) - for selected_hyperparameter in cs.get_hyperparameters(): + for selected_hyperparameter in list(cs.values()): self.initialize_figure() self.fig.suptitle(f"HPOBench RF Task 2079 - 
{selected_hyperparameter.name} - {len(sampler)} samples") @@ -85,6 +89,7 @@ def test_rf_task_2079(self): self.save_fig() plt.show() + @unittest.skip("HPOBench requirement versions breaks stuff...") def test_nn_task_2079(self): seed = 0 cs, f = get_NNBenchmarkMF(2079, seed=seed) @@ -99,7 +104,7 @@ def test_nn_task_2079(self): # Surrogate model surrogate_model = GaussianProcessSurrogate(cs, seed=seed) surrogate_model.fit(sampler.X, sampler.y) - for selected_hyperparameter in cs.get_hyperparameters(): + for selected_hyperparameter in list(cs.values()): self.initialize_figure() self.fig.suptitle(f"HPOBench NN Task 2079 - {selected_hyperparameter.name} - {len(sampler)} samples") diff --git a/tests/blackbox_functions/test_synthetic.py b/tests/blackbox_functions/test_synthetic.py index 766f5c4..c36430d 100644 --- a/tests/blackbox_functions/test_synthetic.py +++ b/tests/blackbox_functions/test_synthetic.py @@ -16,7 +16,7 @@ class TestConfigspaceND(TestCase): def test_same_bounds(self): cs = config_space_nd(4, lower=-4, upper=5, log=False) - hps = cs.get_hyperparameters() + hps = list(cs.values()) for hp in hps: self.assertIsInstance(hp, CSH.NumericalHyperparameter) @@ -27,7 +27,7 @@ def test_same_bounds(self): def test_prefix(self): # Default prefix cs = config_space_nd(4) - hps = cs.get_hyperparameters() + hps = list(cs.values()) expected_names = {"x1", "x2", "x3", "x4"} names = {hp.name for hp in hps} @@ -35,7 +35,7 @@ def test_prefix(self): # Other prefix cs = config_space_nd(4, variable_prefix="other_prefix_") - hps = cs.get_hyperparameters() + hps = list(cs.values()) expected_names = {"other_prefix_1", "other_prefix_2", "other_prefix_3", "other_prefix_4"} names = {hp.name for hp in hps} @@ -43,7 +43,7 @@ def test_prefix(self): def test_different_bounds(self): cs = config_space_nd(3, lower=(0, -1.5, -2), upper=(5, 20, 32.3)) - hps = cs.get_hyperparameters() + hps = list(cs.values()) # Check Hyperparameter 0 self.assertIsInstance(hps[0], CSH.NumericalHyperparameter) @@ -62,7 +62,7 @@ def test_different_bounds(self): def test_constants(self): cs = config_space_nd(3, lower=(0, 5, -2.32), upper=(0, 5, -2.32)) - hps = cs.get_hyperparameters() + hps = list(cs.values()) # Check Hyperparameter 0 self.assertIsInstance(hps[0], CSH.Constant) @@ -81,7 +81,7 @@ class TestLevy(TestCase): def test_config_space(self): f = Levy() default_cs = f.config_space - hp = default_cs.get_hyperparameter("x1") + hp = default_cs["x1"] self.assertIsInstance(hp, CSH.NumericalHyperparameter) self.assertEqual(-10, hp.lower) self.assertEqual(10, hp.upper) @@ -112,7 +112,7 @@ class TestAckley(TestCase): def test_config_space(self): f = Ackley() default_cs = f.config_space - hp = default_cs.get_hyperparameter("x1") + hp = default_cs["x1"] self.assertIsInstance(hp, CSH.NumericalHyperparameter) self.assertEqual(-32.768, hp.lower) self.assertEqual(32.768, hp.upper) @@ -143,7 +143,7 @@ class TestCrossInTray(TestCase): def test_config_space(self): f = CrossInTray() default_cs = f.config_space - hp = default_cs.get_hyperparameter("x1") + hp = default_cs["x1"] self.assertIsInstance(hp, CSH.NumericalHyperparameter) self.assertEqual(-10, hp.lower) self.assertEqual(10, hp.upper) @@ -171,7 +171,7 @@ class TestStyblinskiTang(TestCase): def test_config_space(self): f = StyblinskiTang() default_cs = f.config_space - hp = default_cs.get_hyperparameter("x1") + hp = default_cs["x1"] self.assertIsInstance(hp, CSH.NumericalHyperparameter) self.assertEqual(-5, hp.lower) self.assertEqual(5, hp.upper) @@ -271,7 +271,7 @@ def 
styblinski_tang_3D_int_2D(x1: float, x2: float, lower: float = -5, upper: fl return (upper_term - lower_term) / (upper - lower) # normalization f_int_specific = styblinski_tang_3D_int_2D - f_int_general = f.pd_integral(f.config_space.get_hyperparameter('x3')) + f_int_general = f.pd_integral(f.config_space["x3"]) for x1 in np.linspace(-5, 5, num=100): for x2 in np.linspace(-5, 5, num=100): @@ -294,7 +294,7 @@ def styblinski_tang_3D_int_2D(x1: float, x3: float, lower: float = -5, upper: fl return (upper_term - lower_term) / (upper - lower) # normalization f_int_specific = styblinski_tang_3D_int_2D - f_int_general = f.pd_integral(f.config_space.get_hyperparameter('x2')) + f_int_general = f.pd_integral(f.config_space["x2"]) for x1 in np.linspace(-5, 5, num=100): for x3 in np.linspace(-5, 5, num=100): diff --git a/tests/sampler/test_seeds.py b/tests/sampler/test_seeds.py index 0c322ff..e5b7e99 100644 --- a/tests/sampler/test_seeds.py +++ b/tests/sampler/test_seeds.py @@ -70,7 +70,7 @@ def test_seeds_bo(self): # Model might be deterministic -> Cannot guarantee that not seeded != seeded # ICE - selected_hp = f_seeded_1.config_space.get_hyperparameter("x1") + selected_hp = f_seeded_1.config_space["x1"] ice_seeded_1 = ICE.from_random_points(surrogate_seeded_1, selected_hp, seed=self.seed) ice_seeded_2 = ICE.from_random_points(surrogate_seeded_2, selected_hp, seed=self.seed) ice_not_seeded = ICE.from_random_points(surrogate_not_seeded, selected_hp) diff --git a/tests/utils/test_plotting.py b/tests/utils/test_plotting.py index 00119b8..115e32b 100644 --- a/tests/utils/test_plotting.py +++ b/tests/utils/test_plotting.py @@ -121,7 +121,7 @@ def square_2D(x1: float, x2: float) -> float: self.initialize_figure() cs = config_space_nd(2) - selected_hp = cs.get_hyperparameter("x1") + selected_hp = cs["x1"] sampler = RandomSampler(square_2D, config_space=cs) sampler.sample(10) @@ -138,7 +138,7 @@ def square_2D(x1: float, x2: float) -> float: self.initialize_figure() cs = config_space_nd(2) - selected_hp = cs.get_hyperparameter("x1") + selected_hp = cs["x1"] sampler = RandomSampler(square_2D, config_space=cs) sampler.sample(10) @@ -157,7 +157,7 @@ def square_2D(x1: float, x2: float) -> float: self.initialize_figure() cs = config_space_nd(2) - selected_hp = cs.get_hyperparameter("x1") + selected_hp = cs["x1"] sampler = RandomSampler(square_2D, config_space=cs) sampler.sample(10) @@ -174,7 +174,7 @@ def square_2D(x1: float, x2: float) -> float: self.initialize_figure() cs = config_space_nd(2) - selected_hp = cs.get_hyperparameter("x1") + selected_hp = cs["x1"] sampler = RandomSampler(square_2D, config_space=cs) sampler.sample(10) diff --git a/tests/utils/test_utils.py b/tests/utils/test_utils.py index ca8a365..4d64251 100644 --- a/tests/utils/test_utils.py +++ b/tests/utils/test_utils.py @@ -1,5 +1,4 @@ from unittest import TestCase -import ConfigSpace as CS import ConfigSpace.hyperparameters as CSH import numpy as np from ConfigSpace import ConfigurationSpace, Configuration
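
The recurring change throughout this diff replaces the deprecated ConfigSpace accessors (cs.get_hyperparameter(name), cs.get_hyperparameters()) with the Mapping-style interface (cs[name], cs.values()). A minimal sketch of the old-vs-new pattern, assuming ConfigSpace >= 0.7 (where ConfigurationSpace behaves like a dict of hyperparameters) and hypothetical hyperparameters x1/x2:

    import ConfigSpace as CS

    # Hypothetical 2D search space; the dict constructor maps names to (lower, upper).
    cs = CS.ConfigurationSpace({"x1": (-5.0, 5.0), "x2": (-5.0, 5.0)}, seed=0)

    # Old (deprecated)                      New (Mapping-style)
    # cs.get_hyperparameter("x1")       ->  cs["x1"]
    # cs.get_hyperparameters()          ->  list(cs.values())
    # len(cs.get_hyperparameters())     ->  len(list(cs.values()))

    x1 = cs["x1"]                    # single hyperparameter, looked up by name
    all_hps = list(cs.values())      # all hyperparameters, in insertion order
    n_dims = len(list(cs.values()))  # dimensionality of the space
    print(x1.name, [hp.name for hp in all_hps], n_dims)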
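
The pd_integral hunk in synthetic_functions.py relies on the closed-form antiderivative _styblinski_tang_integral. A short self-check of the two formulas that appear in that hunk (the per-dimension term (x^4 - 16 x^2 + 5 x) / 2 and its antiderivative); the bounds -5/5 match the function's default config space:

    import numpy as np

    def styblinski_tang_term(x):
        # One summand of StyblinskiTang.value_from_config: (x^4 - 16 x^2 + 5 x) / 2
        return (np.power(x, 4) - 16 * np.power(x, 2) + 5 * x) / 2

    def styblinski_tang_integral(x):
        # _styblinski_tang_integral from the diff: antiderivative of the term above
        return 0.5 * (0.2 * np.power(x, 5) - 16 / 3 * np.power(x, 3) + 2.5 * np.power(x, 2))

    lower, upper = -5.0, 5.0
    analytic = styblinski_tang_integral(upper) - styblinski_tang_integral(lower)
    grid = np.linspace(lower, upper, 100_001)
    numeric = styblinski_tang_term(grid).mean() * (upper - lower)  # crude quadrature
    print(analytic, numeric)           # both ~= -41.67
    print(analytic / (upper - lower))  # ~= -4.17, the integral_offset contributed per marginalized dimension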
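
The plotting changes double the backslashes in mathtext labels so that sequences like \m, \p and \s in ordinary string literals no longer trigger Python's invalid-escape warnings (a SyntaxWarning from Python 3.12 onward). An equivalent alternative, not used in this diff, is a raw string literal; a small sketch with made-up data:

    import numpy as np
    from matplotlib import pyplot as plt

    x = np.linspace(0, 1, 50)
    mean, std = np.sin(2 * np.pi * x), np.full_like(x, 0.1)

    # As in the diff: doubled backslashes keep mathtext intact without escape warnings.
    plt.plot(x, mean + std, color="orange", label="Decision Tree $\\mu\\pm$ $\\sigma$")
    # Equivalent alternative: raw string literal.
    plt.plot(x, mean - std, color="orange", label=r"Decision Tree $\mu\pm$ $\sigma$")

    plt.legend()
    plt.show()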
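
The new SurrogateModel.get_incumbent() performs a simple Monte-Carlo search: sample configurations from the config space, evaluate the surrogate on them, and return the best (configuration, value) pair. A usage sketch built from classes that appear elsewhere in this diff; the function choice, sample counts, and seed are arbitrary assumptions:

    from pyPDP.blackbox_functions.synthetic_functions import StyblinskiTang
    from pyPDP.sampler.bayesian_optimization import BayesianOptimizationSampler

    f = StyblinskiTang.for_n_dimensions(2, seed=0)
    cs = f.config_space

    # Fit a surrogate by running Bayesian-optimization sampling.
    sampler = BayesianOptimizationSampler(f, cs, initial_points=8, seed=0)
    sampler.sample(40)

    # New in this diff: ask the fitted surrogate for its current best guess.
    incumbent_config, incumbent_value = sampler.surrogate_model.get_incumbent(
        n_samples_for_optimization=1000,
        minimize=True,  # keyword-only, matching a minimized objective
    )
    print(incumbent_config, incumbent_value)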
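
GridSampler._sample (touched above only for the accessor change) sizes its grid so that the Cartesian product has at least expected_length points, then builds it with ConfigSpace's generate_grid utility. A standalone sketch of that sizing logic with assumed numbers:

    import numpy as np
    import ConfigSpace as CS
    from ConfigSpace.util import generate_grid

    cs = CS.ConfigurationSpace({"x1": (-5.0, 5.0), "x2": (-5.0, 5.0)}, seed=0)

    expected_length = 30
    n_dims = len(list(cs.values()))
    # Steps per axis chosen so that steps ** n_dims >= expected_length.
    steps_per_axis = int(np.ceil(expected_length ** (1 / n_dims)))
    num_steps_dict = {hp.name: steps_per_axis for hp in list(cs.values())}

    grid = generate_grid(cs, num_steps_dict)  # list of CS.Configuration
    print(steps_per_axis, len(grid))          # 6 -> 36 grid points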
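
copy_config_space now delegates to copy.deepcopy instead of rebuilding the space hyperparameter by hyperparameter, which also drops the TypeError raised for hyperparameter types the old loop did not handle. The new body as it appears in the diff, followed by a small usage example (the example space itself is a made-up assumption):

    from copy import deepcopy
    import ConfigSpace as CS

    def copy_config_space(cs: CS.ConfigurationSpace, *, seed=None) -> CS.ConfigurationSpace:
        cs_copy = deepcopy(cs)  # copies every hyperparameter, whatever its type
        cs_copy.seed(seed)      # re-seed the copy independently of the original
        return cs_copy

    original = CS.ConfigurationSpace({"x1": (-5.0, 5.0), "optimizer": ["adam", "sgd"]}, seed=1)
    copied = copy_config_space(original, seed=42)
    print(list(copied.values()))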