
Commit

Merge pull request #256 from xopt-org/spectral_time_kernel
Spectral time kernel
roussel-ryan authored Dec 16, 2024
2 parents 0b8c706 + 8399722 commit b6bcd6e
Showing 3 changed files with 86 additions and 17 deletions.
28 changes: 11 additions & 17 deletions docs/examples/single_objective_bayes_opt/time_dependent_bo.ipynb
@@ -3,10 +3,7 @@
{
"cell_type": "markdown",
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
"collapsed": false
},
"source": [
"# Time dependent Bayesian Optimization\n",
@@ -37,16 +34,14 @@
"SMOKE_TEST = os.environ.get(\"SMOKE_TEST\")\n",
"N_MC_SAMPLES = 1 if SMOKE_TEST else 128\n",
"NUM_RESTARTS = 1 if SMOKE_TEST else 20\n",
"N_STEPS = 1 if SMOKE_TEST else 250\n",
"warnings.filterwarnings(\"ignore\")"
]
},
{
"cell_type": "markdown",
"metadata": {
"collapsed": false,
"jupyter": {
"outputs_hidden": false
}
"collapsed": false
},
"source": [
"## Time dependent test problem\n",
@@ -147,20 +142,19 @@
" vocs=vocs,\n",
" beta=0.01,\n",
" added_time=0.1,\n",
" forgetting_time=20.0,\n",
" forgetting_time=10.0,\n",
")\n",
"generator.n_monte_carlo_samples = N_MC_SAMPLES\n",
"generator.numerical_optimizer.n_restarts = NUM_RESTARTS\n",
"generator.max_travel_distances = [0.1]\n",
"\n",
"start_time = time.time()\n",
"\n",
"X = Xopt(evaluator=evaluator, generator=generator, vocs=vocs)\n",
"X.random_evaluate(1)\n",
"X.random_evaluate(2)\n",
"\n",
"for i in trange(300):\n",
" # note that in this example we can ignore warnings if computation time is greater\n",
" # than added time\n",
"for _ in trange(N_STEPS):\n",
" # note that in this example we can ignore warnings if computation\n",
" # time is greater than added time\n",
" with warnings.catch_warnings():\n",
" warnings.filterwarnings(\"ignore\", category=RuntimeWarning)\n",
" X.step()\n",
@@ -211,7 +205,7 @@
" ax.set_title(\"model mean\")\n",
" ax.set_xlabel(\"unix time\")\n",
" ax.set_ylabel(\"x\")\n",
" c = ax.pcolor(tt, xx, mean.reshape(n, n))\n",
" c = ax.pcolor(tt, xx, mean.reshape(n, n), rasterized=True)\n",
" ax.plot(data[\"time\"].to_numpy(), data[\"x\"].to_numpy(), \"oC1\", label=\"samples\")\n",
"\n",
" ax.plot(t, k(t - start_time), \"C3--\", label=\"ideal path\", zorder=10)\n",
@@ -316,9 +310,9 @@
"start_time = time.time()\n",
"\n",
"X = Xopt(evaluator=evaluator, generator=generator, vocs=vocs)\n",
"X.random_evaluate(1)\n",
"X.random_evaluate(2)\n",
"\n",
"for i in trange(300):\n",
"for i in trange(N_STEPS):\n",
" # note that in this example we can ignore warnings if computation time is greater\n",
" # than added time\n",
" if i == 50:\n",
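For orientation, here is a compact, self-contained sketch of the loop the updated notebook runs. It is not part of the diff: the generator import and the drifting objective f are assumptions (the hunks above never show the generator class, taken here to be xopt's TDUpperConfidenceBoundGenerator), while the beta, added_time, forgetting_time, max_travel_distances, random_evaluate(2), and SMOKE_TEST-gated N_STEPS settings mirror the notebook changes.

import os
import time
import warnings

import numpy as np
from tqdm import trange

from xopt import Evaluator, VOCS, Xopt
from xopt.generators.bayesian import TDUpperConfidenceBoundGenerator  # assumed import

SMOKE_TEST = os.environ.get("SMOKE_TEST")
N_STEPS = 1 if SMOKE_TEST else 250

start_time = time.time()


def f(inputs):
    # toy objective whose optimum drifts in time (illustrative only)
    return {"y": float(np.sin(inputs["x"] - 0.1 * (time.time() - start_time)))}


vocs = VOCS(variables={"x": [0.0, 6.28]}, objectives={"y": "MINIMIZE"})
evaluator = Evaluator(function=f)

generator = TDUpperConfidenceBoundGenerator(
    vocs=vocs,
    beta=0.01,
    added_time=0.1,
    forgetting_time=10.0,
)
generator.max_travel_distances = [0.1]

X = Xopt(evaluator=evaluator, generator=generator, vocs=vocs)
X.random_evaluate(2)

for _ in trange(N_STEPS):
    # ignore warnings raised when an evaluation takes longer than added_time
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", category=RuntimeWarning)
        X.step()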
50 changes: 50 additions & 0 deletions xopt/generators/bayesian/models/time_dependent.py
@@ -4,14 +4,23 @@
import pandas as pd
import torch
from botorch.models import ModelListGP
from gpytorch.kernels import (
    ProductKernel,
    SpectralMixtureKernel,
    MaternKernel,
)
from gpytorch.priors import GammaPrior
from pydantic import Field

from xopt.generators.bayesian.models.standard import StandardModelConstructor
from xopt.generators.bayesian.utils import get_training_data
from xopt.vocs import VOCS


class TimeDependentModelConstructor(StandardModelConstructor):
    name: str = Field("time_dependent", frozen=True)
    use_spectral_mixture_kernel: bool = True
    initialize_spectral_kernel_from_data: bool = False

    def build_model(
        self,
@@ -31,6 +40,47 @@ def build_model(
        new_input_bounds = deepcopy(input_bounds)
        new_input_bounds["time"] = [min_t, max_t]

        # set covar modules if not specified -- use SpectralMixtureKernel for time axis
        # see Kuklev, N., et al. "Online accelerator tuning with adaptive
        # bayesian optimization." Proc. NAPAC 22 (2022): 842.
        if self.use_spectral_mixture_kernel:
            covar_modules = {}
            for name in outcome_names:
                if len(input_names) == 1:
                    matern_dims = [0]
                else:
                    matern_dims = tuple(range(len(input_names)))
                time_dim = [len(input_names)]

                matern_kernel = MaternKernel(
                    nu=2.5,
                    active_dims=matern_dims,
                    lengthscale_prior=GammaPrior(3.0, 6.0),
                )
                spectral_kernel = SpectralMixtureKernel(
                    num_mixtures=3, active_dims=time_dim
                )

                if self.initialize_spectral_kernel_from_data:
                    train_X, train_Y, train_Yvar = get_training_data(
                        new_input_names, name, data
                    )

                    # can only initialize spectral kernel from data if there are
                    # more than one training data point
                    if len(train_X) > 1:
                        spectral_kernel.initialize_from_data(train_X, train_Y)
                    else:
                        raise RuntimeWarning(
                            "cannot initialize spectral kernel from a "
                            "single data sample, may negatively impact"
                            " performance"
                        )

                covar_modules[name] = ProductKernel(spectral_kernel, matern_kernel)

            self.covar_modules = covar_modules

        return super().build_model(
            new_input_names, outcome_names, data, new_input_bounds, dtype, device
        )
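To make the new covariance structure concrete, below is a minimal standalone sketch (not part of the diff) of what the constructor assembles when use_spectral_mixture_kernel is enabled: a Matern kernel over the input columns multiplied by a SpectralMixtureKernel over the trailing time column, following Kuklev et al. (NAPAC'22). The number of inputs and the toy data are illustrative assumptions.

import torch
from gpytorch.kernels import MaternKernel, ProductKernel, SpectralMixtureKernel
from gpytorch.priors import GammaPrior

n_inputs = 2  # number of non-time input variables (assumed)

matern_kernel = MaternKernel(
    nu=2.5,
    active_dims=tuple(range(n_inputs)),  # acts on the input columns
    lengthscale_prior=GammaPrior(3.0, 6.0),
)
spectral_kernel = SpectralMixtureKernel(
    num_mixtures=3, active_dims=[n_inputs]  # acts on the last (time) column
)

# toy data: columns are (x1, x2, time)
train_X = torch.rand(8, n_inputs + 1)
train_Y = torch.sin(train_X[:, -1])

# optionally seed the mixture parameters from the time axis of the data, as the
# initialize_spectral_kernel_from_data flag does (requires more than one sample)
spectral_kernel.initialize_from_data(train_X[:, -1:], train_Y)

covar_module = ProductKernel(spectral_kernel, matern_kernel)

K = covar_module(train_X).to_dense()  # .evaluate() on older gpytorch releases
print(K.shape)  # torch.Size([8, 8])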
25 changes: 25 additions & 0 deletions xopt/tests/generators/bayesian/test_time_dependent_bo.py
@@ -19,6 +19,31 @@ def test_init(self):

    @patch.multiple(TimeDependentBayesianGenerator, __abstractmethods__=set())
    def test_model_generation(self):
        # test single dim variable space
        vocs = deepcopy(TEST_VOCS_BASE)
        vocs.variables = {"x1": [0, 1.0]}
        gen = TimeDependentBayesianGenerator(vocs=vocs)
        test_data = deepcopy(TEST_VOCS_DATA)
        test_data.drop("x2", axis=1, inplace=True)

        time_array = []
        for i in range(len(test_data)):
            time_array.append(time.time())
            time.sleep(0.01)

        test_data["time"] = np.array(time_array)

        model = gen.train_model(test_data)

        # make sure time data is in the last model
        assert np.all(
            model.models[-1]
            .input_transform.untransform(model.models[-1].train_inputs[0])[:, -1]
            .numpy()
            == test_data["time"].to_numpy().flatten()
        )

        # test multi-dim variable space
        gen = TimeDependentBayesianGenerator(vocs=TEST_VOCS_BASE)
        test_data = deepcopy(TEST_VOCS_DATA)

