diff --git a/HEBO/.gitignore b/HEBO/.gitignore
index 7f72bd9..8c52073 100644
--- a/HEBO/.gitignore
+++ b/HEBO/.gitignore
@@ -19,3 +19,5 @@ worksapce/
 
 # catboost
 catboost_info/
+
+.conda_environment
diff --git a/HEBO/INSTALL b/HEBO/INSTALL
new file mode 100755
index 0000000..e8699df
--- /dev/null
+++ b/HEBO/INSTALL
@@ -0,0 +1,8 @@
+#!/usr/bin/env fish
+
+true
+and conda activate base
+and rm -rf ./.conda_environment
+and conda env create --prefix ./.conda_environment --file conda.yaml
+and conda activate ./.conda_environment
+and pip install --default-timeout=300 -r requirements.txt
diff --git a/HEBO/conda.yaml b/HEBO/conda.yaml
new file mode 100644
index 0000000..10f950f
--- /dev/null
+++ b/HEBO/conda.yaml
@@ -0,0 +1,5 @@
+channels:
+  - defaults
+dependencies:
+  - pip=23.3
+  - python=3.9.18
diff --git a/HEBO/hebo/models/gp/gp_util.py b/HEBO/hebo/models/gp/gp_util.py
index 96ecdbf..647d1ab 100644
--- a/HEBO/hebo/models/gp/gp_util.py
+++ b/HEBO/hebo/models/gp/gp_util.py
@@ -10,14 +10,14 @@ import numpy as np
 import torch
 import torch.nn as nn
 
-from gpytorch.kernels import (AdditiveKernel, MaternKernel, ProductKernel,
-                              ScaleKernel)
-from gpytorch.priors import GammaPrior
 from torch import FloatTensor, LongTensor
 
-from ..layers import EmbTransform
-from ..util import get_random_graph
+from gpytorch.kernels import MaternKernel, ScaleKernel, ProductKernel
+from gpytorch.priors import GammaPrior
+from gpytorch.constraints.constraints import LessThan
+
+from ..layers import EmbTransform
 
 
 class DummyFeatureExtractor(nn.Module):
     def __init__(self, num_cont, num_enum, num_uniqs = None, emb_sizes = None):
@@ -43,7 +43,8 @@ def default_kern(x, xe, y, total_dim = None, ard_kernel = True, fe = None, max_x
         kerns = []
         if has_num:
             ard_num_dims = x.shape[1] if ard_kernel else None
-            kernel = MaternKernel(nu = 1.5, ard_num_dims = ard_num_dims, active_dims = torch.arange(x.shape[1]))
+            kernel = MaternKernel(nu = 1.5, ard_num_dims = ard_num_dims, active_dims = torch.arange(x.shape[1]),
+                                  lengthscale_constraint=LessThan(5))
             if ard_kernel:
                 lscales = kernel.lengthscale.detach().clone().view(1, -1)
                 for i in range(x.shape[1]):
@@ -52,19 +53,21 @@ def default_kern(x, xe, y, total_dim = None, ard_kernel = True, fe = None, max_x
             kernel.lengthscale = lscales
         kerns.append(kernel)
         if has_enum:
-            kernel = MaternKernel(nu = 1.5, active_dims = torch.arange(x.shape[1], total_dim))
+            kernel = MaternKernel(nu = 1.5, active_dims = torch.arange(x.shape[1], total_dim),
+                                  lengthscale_constraint=LessThan(5))
             kerns.append(kernel)
         final_kern = ScaleKernel(ProductKernel(*kerns), outputscale_prior = GammaPrior(0.5, 0.5))
         final_kern.outputscale = y[torch.isfinite(y)].var()
         return final_kern
     else:
         if ard_kernel:
-            kernel = ScaleKernel(MaternKernel(nu = 1.5, ard_num_dims = total_dim))
+            kernel = ScaleKernel(MaternKernel(nu = 1.5, ard_num_dims = total_dim,
+                                 lengthscale_constraint=LessThan(5)))
         else:
             kernel = ScaleKernel(MaternKernel(nu = 1.5))
         kernel.outputscale = y[torch.isfinite(y)].var()
         return kernel
-    
+
 def default_kern_rd(x, xe, y, total_dim = None, ard_kernel = True, fe = None, max_x = 1000, E=0.2):
     '''
     Get a default kernel with random decompositons. 0 <= E <=1 specifies random tree conectivity.
diff --git a/HEBO/hebo_on_bbob.py b/HEBO/hebo_on_bbob.py
new file mode 100644
index 0000000..45c8a17
--- /dev/null
+++ b/HEBO/hebo_on_bbob.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+"""HEBO_on_BBOB.ipynb
+
+Automatically generated by Colaboratory.
+
+Original file is located at
+    https://colab.research.google.com/drive/1XftMKU7-tWj0cdWjH7XsfiDPBIKXAWZk
+"""
+
+import hebo
+import ioh
+import numpy
+
+ioh_logger = ioh.logger.Analyzer(
+    [ioh.logger.trigger.ALWAYS],
+    additional_properties = [
+        # ioh.logger.property.EVALUATIONS,
+        # ioh.logger.property.RAWY,
+        ioh.logger.property.RAWYBEST,
+        ioh.logger.property.TRANSFORMEDY,
+        ioh.logger.property.TRANSFORMEDYBEST,
+        # ioh.logger.property.CURRENTBESTY,
+        # ioh.logger.property.CURRENTY,
+        # ioh.logger.property.PENALTY,
+        # ioh.logger.property.VIOLATION,
+    ],
+    algorithm_name = "HEBO",
+    folder_name = "HEBO",
+    root = "HEBO_on_BBOB",
+)
+
+# WARNING: random number generator seeds are NOT fixed by these parameters.
+BBOB_SEARCH_SPACE_LOWER_BOUND = -5
+BBOB_SEARCH_SPACE_UPPER_BOUND = 5
+num_variables_list = [60]
+fids = [1]
+instances = [0]
+num_runs = 10
+run_budget = 80
+total_num_runs = num_runs * len(instances) * len(fids) * len(num_variables_list)
+run_index = 0
+for fid in fids:
+    for instance in instances:
+        for num_variables in num_variables_list:
+            for _ in range(num_runs):
+                bbob_problem = ioh.get_problem(
+                    fid = fid,
+                    instance = instance,
+                    dimension = num_variables,
+                )
+                bbob_problem.attach_logger(ioh_logger)
+
+                hebo_space = hebo.design_space.design_space.DesignSpace().parse([
+                    {
+                        'name': f'x{variable_index}',
+                        'type': 'num',
+                        'lb': BBOB_SEARCH_SPACE_LOWER_BOUND,
+                        'ub': BBOB_SEARCH_SPACE_UPPER_BOUND,
+                    }
+                    for variable_index in range(num_variables)
+                ])
+
+                hebo_optimizer = hebo.optimizers.hebo.HEBO(hebo_space)
+                for i in range(run_budget):
+                    recommendations_DataFrame = hebo_optimizer.suggest(n_suggestions = 1)
+                    recommendations_list = recommendations_DataFrame.iloc[0].tolist()
+                    recommendation = numpy.array([bbob_problem(recommendations_list)])
+                    hebo_optimizer.observe(recommendations_DataFrame, recommendation)
+
+                run_index += 1
+                print(f"Done: {run_index}/{total_num_runs:,}")
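
Note (not part of the patch): a minimal sketch of what the lengthscale_constraint=LessThan(5) argument added to the Matern kernels in gp_util.py does, assuming the softplus parameterization that gpytorch's LessThan constraint uses by default; the raw parameter is unconstrained, while the lengthscale the kernel actually uses is kept below the bound.

    from gpytorch.kernels import MaternKernel
    from gpytorch.constraints.constraints import LessThan

    # Build a kernel the way default_kern now does: ARD lengthscales bounded above by 5.
    kernel = MaternKernel(nu=1.5, ard_num_dims=3, lengthscale_constraint=LessThan(5.0))

    # The raw (unconstrained) parameter is mapped through 5 - softplus(-raw), so however
    # far a hyper-parameter optimizer drives it, the effective lengthscale stays below 5.
    for raw in (0.0, 2.0, 100.0):
        kernel.raw_lengthscale.data.fill_(raw)
        print(raw, kernel.lengthscale.detach().flatten().tolist())
    # 0.0 -> ~4.31, 2.0 -> ~4.87, 100.0 -> approaches (but never exceeds) 5.0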