Merge branch 'botorch_update' into generator_standard
roussel-ryan committed Jan 6, 2025
2 parents f5c6b79 + 1a19e81 commit 5a395ea
Showing 11 changed files with 62 additions and 27 deletions.
2 changes: 1 addition & 1 deletion .github/actions/conda-setup/action.yml
@@ -8,7 +8,7 @@ inputs:
   python-version:
     description: "Conda environment Python version"
     required: false
-    default: "3.9"
+    default: "3.10"
   env_name:
     description: "Conda environment name to create"
     required: false
2 changes: 1 addition & 1 deletion .github/workflows/tests.yml
@@ -13,7 +13,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.9", "3.10", "3.11", "3.12"]
+        python-version: ["3.10", "3.11", "3.12", "3.13"]

     name: Test Suite
     steps:
4 changes: 2 additions & 2 deletions environment.yml
@@ -3,12 +3,12 @@ name: xopt-dev
 channels:
   - conda-forge
 dependencies:
-  - python>=3.9
+  - python>=3.10
   - deap
   - numpy
   - pydantic>=2.3
   - pyyaml
-  - botorch>=0.9.2,<=0.10.0
+  - botorch
   - scipy>=1.10.1
   - pandas
   - ipywidgets
4 changes: 2 additions & 2 deletions pyproject.toml
@@ -21,7 +21,7 @@ dependencies = [
   "numpy",
   "pydantic>=2.3",
   "pyyaml",
-  "botorch>=0.9.2,<=0.10.0",
+  "botorch",
   "scipy>=1.10.1",
   "pandas",
   "ipywidgets",
@@ -34,7 +34,7 @@ dynamic = [ "version" ]
 keywords = []
 name = "xopt"
 readme = {file = "README.md", content-type = "text/markdown"}
-requires-python = ">=3.9"
+requires-python = ">=3.10"

 [project.optional-dependencies]
 dev = [
4 changes: 2 additions & 2 deletions xopt/generators/bayesian/bax/acquisition.py
@@ -37,14 +37,14 @@ def __init__(self, model: Model, algorithm: Algorithm, bounds: Tensor) -> None:
         ) = self.algorithm.get_execution_paths(self.model, bounds)

         # Need to call the model on some data before we can condition_on_observations
-        self.model(*[self.xs_exe[:1, 0:1, 0:] for m in model.models])
+        self.model.posterior(*[self.xs_exe[:1, 0:1, 0:] for m in model.models])

         # construct a batch of size n_samples fantasy models,
         # where each fantasy model is produced by taking the model
         # at the current iteration and conditioning it
         # on one of the sampled execution path subsequences:
         xs_exe_t = [
-            model.models[i].input_transform(self.xs_exe)
+            list(model.models)[i].input_transform(self.xs_exe)
             for i in range(len(model.models))
         ]
         ys_exe_t = [
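One plausible reading of this change: recent BoTorch releases expect a posterior() evaluation, rather than a bare forward call, to populate the model's prediction caches before condition_on_observations can be used. A minimal sketch of that pattern with toy data (shapes and values are illustrative, not from this commit):

import torch
from botorch.models import SingleTaskGP

train_X = torch.rand(8, 2, dtype=torch.double)
train_Y = train_X.sum(dim=1, keepdim=True)
model = SingleTaskGP(train_X, train_Y)

X_new = torch.rand(3, 2, dtype=torch.double)
Y_new = X_new.sum(dim=1, keepdim=True)

model.posterior(X_new)  # prime the prediction caches first
fantasy_model = model.condition_on_observations(X_new, Y_new)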
8 changes: 6 additions & 2 deletions xopt/generators/bayesian/bax/algorithms.py
@@ -2,7 +2,7 @@
 from typing import ClassVar, Dict, List, Tuple

 import torch
-from botorch.models.model import Model
+from botorch.models.model import Model, ModelList
 from pydantic import Field, PositiveInt
 from torch import Tensor

@@ -76,7 +76,11 @@ def get_execution_paths(
         """get execution paths that minimize the objective function"""

         # build evaluation mesh
-        test_points = self.create_mesh(bounds).to(model.models[0].train_targets)
+        test_points = self.create_mesh(bounds)
+        if isinstance(model, ModelList):
+            test_points = test_points.to(model.models[0].train_targets)
+        else:
+            test_points = test_points.to(model.train_targets)

         # get samples of the model posterior at mesh points
         posterior_samples = self.evaluate_virtual_objective(
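The new branch exists because a plain Model exposes train_targets directly, while a ModelList holds them per sub-model; in both cases .to(...) only matches the mesh's dtype and device to the training data. A small illustration with hypothetical tensors:

import torch

mesh = torch.rand(5, 2)  # defaults to float32 on the CPU
train_targets = torch.rand(5, dtype=torch.double)  # stand-in for a model's targets

mesh = mesh.to(train_targets)  # adopt the targets' dtype and device
assert mesh.dtype == torch.float64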
5 changes: 5 additions & 0 deletions xopt/generators/bayesian/bax_generator.py
@@ -3,6 +3,7 @@
 from copy import deepcopy
 from typing import Dict

+from botorch.models import ModelListGP, SingleTaskGP
 from pydantic import Field, field_validator
 from pydantic_core.core_schema import ValidationInfo

@@ -53,6 +54,10 @@ def _get_acquisition(self, model):
             for name in self.algorithm.observable_names_ordered
         ]
         bax_model = model.subset_output(bax_model_ids)
+
+        if isinstance(bax_model, SingleTaskGP):
+            bax_model = ModelListGP(bax_model)
+
         eig = ModelListExpectedInformationGain(
             bax_model, self.algorithm, self._get_optimization_bounds()
         )
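The added guard appears to handle the case where subset_output hands back a bare SingleTaskGP when only one observable is selected, while ModelListExpectedInformationGain iterates over model.models. A standalone sketch of the normalization with toy data (not from the commit):

import torch
from botorch.models import ModelListGP, SingleTaskGP

X = torch.rand(6, 2, dtype=torch.double)
Y = X.sum(dim=1, keepdim=True)
bax_model = SingleTaskGP(X, Y)

# wrap a single-output model so downstream code can rely on .models
if isinstance(bax_model, SingleTaskGP):
    bax_model = ModelListGP(bax_model)
assert len(bax_model.models) == 1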
45 changes: 35 additions & 10 deletions xopt/generators/bayesian/custom_botorch/heteroskedastic.py
@@ -23,6 +23,29 @@


 class XoptHeteroskedasticSingleTaskGP(BatchedMultiOutputGPyTorchModel, ExactGP):
+    r"""
+    Xopt copy of HeteroskedasticSingleTaskGP from botorch which allows for a user
+    to specify mean and covariance modules.
+
+    A single-task exact GP model using a heteroskedastic noise model.
+
+    This model differs from `SingleTaskGP` with observed observation noise
+    variances (`train_Yvar`) in that it can predict noise levels out of sample.
+    This is achieved by internally wrapping another GP (a `SingleTaskGP`) to model
+    the (log of) the observation noise. Noise levels must be provided to
+    `HeteroskedasticSingleTaskGP` as `train_Yvar`.
+
+    Examples of cases in which noise levels are known include online
+    experimentation and simulation optimization.
+
+    Example:
+        >>> train_X = torch.rand(20, 2)
+        >>> train_Y = torch.sin(train_X).sum(dim=1, keepdim=True)
+        >>> se = torch.linalg.norm(train_X, dim=1, keepdim=True)
+        >>> train_Yvar = 0.1 + se * torch.rand_like(train_Y)
+        >>> model = HeteroskedasticSingleTaskGP(train_X, train_Y, train_Yvar)
+    """
+
     def __init__(
         self,
         train_X: Tensor,
@@ -34,12 +57,7 @@ def __init__(
         covar_module: Optional[Module] = None,
     ) -> None:
         r"""
-        Xopt copy of HeteroskedasticSingleTaskGP from botorch which allows for a user
-        to specify mean and covariance modules.
-
-        Parameters
-        ----------
-
+        Args:
             train_X: A `batch_shape x n x d` tensor of training features.
             train_Y: A `batch_shape x n x m` tensor of training observations.
             train_Yvar: A `batch_shape x n x m` tensor of observed measurement
@@ -53,6 +71,7 @@ def __init__(
             input_transform: An input transform that is applied in the model's
                 forward pass.
         """
+
         if outcome_transform is not None:
             train_Y, train_Yvar = outcome_transform(train_Y, train_Yvar)
         self._validate_tensor_args(X=train_X, Y=train_Y, Yvar=train_Yvar)
@@ -65,12 +84,19 @@ def __init__(
                 MIN_INFERRED_NOISE_LEVEL, transform=None, initial_value=1.0
             ),
         )
+        # Likelihood will always get evaluated with transformed X, so we need to
+        # transform the training data before constructing the noise model.
+        with torch.no_grad():
+            transformed_X = self.transform_inputs(
+                X=train_X, input_transform=input_transform
+            )
         noise_model = SingleTaskGP(
-            train_X=train_X,
+            train_X=transformed_X,
             train_Y=train_Yvar,
             likelihood=noise_likelihood,
             outcome_transform=Log(),
-            input_transform=input_transform,
+            mean_module=mean_module,
+            covar_module=covar_module,
         )
         likelihood = _GaussianLikelihoodBase(HeteroskedasticNoise(noise_model))
         # This is hacky -- this class used to inherit from SingleTaskGP, but it
@@ -82,9 +108,8 @@ def __init__(
             train_X=train_X,
             train_Y=train_Y,
             likelihood=likelihood,
+            outcome_transform=None,
             input_transform=input_transform,
-            mean_module=mean_module,
-            covar_module=covar_module,
         )
         self.register_added_loss_term("noise_added_loss")
         self.update_added_loss_term(
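Taken together, the diff trains the internal noise GP on pre-transformed inputs (the likelihood only ever sees transformed X) and, judging by the moved keyword arguments, now routes the user-supplied mean_module and covar_module to that noise model rather than the outer GP. A hedged usage sketch with hypothetical data, assuming the constructor signature shown above:

import torch
from gpytorch.kernels import MaternKernel, ScaleKernel
from gpytorch.means import ConstantMean

from xopt.generators.bayesian.custom_botorch.heteroskedastic import (
    XoptHeteroskedasticSingleTaskGP,
)

train_X = torch.rand(20, 2, dtype=torch.double)
train_Y = torch.sin(train_X).sum(dim=1, keepdim=True)
train_Yvar = 0.1 + 0.1 * torch.rand_like(train_Y)

# custom modules here would configure the internal (log-)noise GP
model = XoptHeteroskedasticSingleTaskGP(
    train_X,
    train_Y,
    train_Yvar,
    mean_module=ConstantMean(),
    covar_module=ScaleKernel(MaternKernel(ard_num_dims=2)),
)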
6 changes: 2 additions & 4 deletions xopt/generators/bayesian/turbo.py
@@ -113,10 +113,8 @@ def get_trust_region(self, generator) -> Tensor:
         weights = 1.0

         if model is not None:
-            if model.models[0].covar_module.base_kernel.lengthscale is not None:
-                lengthscales = model.models[
-                    0
-                ].covar_module.base_kernel.lengthscale.detach()
+            if model.models[0].covar_module.lengthscale is not None:
+                lengthscales = model.models[0].covar_module.lengthscale.detach()

                 # calculate the ratios of lengthscales for each axis
                 weights = lengthscales / torch.prod(lengthscales) ** (1 / self.dim)
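This tracks BoTorch's newer default SingleTaskGP kernel, which in recent releases is no longer wrapped in a ScaleKernel, so the lengthscale hangs off covar_module directly. A sketch of the weight computation under that assumption:

import torch
from botorch.models import SingleTaskGP

dim = 3
X = torch.rand(10, dim, dtype=torch.double)
Y = X.sum(dim=1, keepdim=True)
model = SingleTaskGP(X, Y)

# ARD gives one lengthscale per input; no .base_kernel indirection needed
lengthscales = model.covar_module.lengthscale.detach()
weights = lengthscales / torch.prod(lengthscales) ** (1 / dim)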
5 changes: 4 additions & 1 deletion xopt/tests/generators/bayesian/test_bayesian_generator.py
@@ -133,7 +133,10 @@ def test_transforms(self):
         input_transform = Normalize(1, bounds=torch.tensor(sinusoid_vocs.bounds))
         for inputs in model.train_inputs:
             assert torch.allclose(
-                inputs[0], input_transform(torch.from_numpy(X.data["x1"].to_numpy())).T
+                inputs[0].unsqueeze(-1).T,
+                input_transform(
+                    torch.from_numpy(X.data["x1"].to_numpy()).unsqueeze(-1)
+                ).T,
             )

         # test outcome transform(s)
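The extra unsqueeze(-1) calls account for Normalize operating on batches shaped (..., n, d), so a 1-D series of n samples needs an explicit feature dimension first. For instance, with hypothetical data:

import torch
from botorch.models.transforms import Normalize

x1 = torch.rand(7, dtype=torch.double)  # n scalar samples
transform = Normalize(1, bounds=torch.tensor([[0.0], [1.0]], dtype=torch.double))
x1_t = transform(x1.unsqueeze(-1))  # shape (7, 1), as Normalize expects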
4 changes: 2 additions & 2 deletions xopt/tests/generators/bayesian/test_model_constructor.py
@@ -536,8 +536,8 @@ def test_model_caching(self):

         state = deepcopy(constructor._hyperparameter_store)
         assert torch.equal(
-            old_model.models[0].covar_module.base_kernel.raw_lengthscale,
-            state["models.0.covar_module.base_kernel.raw_lengthscale"],
+            old_model.models[0].covar_module.raw_lengthscale,
+            state["models.0.covar_module.raw_lengthscale"],
         )

         # add data and use the cached model hyperparameters
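With the un-wrapped kernel, the raw lengthscale parameter likewise moves up one level in the state dict. A quick check under the same assumption about recent BoTorch defaults:

import torch
from botorch.models import SingleTaskGP

X = torch.rand(5, 2, dtype=torch.double)
model = SingleTaskGP(X, X.sum(dim=1, keepdim=True))
assert "covar_module.raw_lengthscale" in model.state_dict()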
