Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Make optimizers raise an error when provided negative fixed features #2603

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 14 additions & 5 deletions botorch/optim/optimize.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,10 @@ def __post_init__(self) -> None:
"Must specify `raw_samples` when "
"`batch_initial_conditions` is None`."
)
if self.fixed_features is not None and any(
(k < 0 for k in self.fixed_features)
):
raise ValueError("All indices (keys) in `fixed_features` must be >= 0.")

def get_ic_generator(self) -> TGenInitialConditions:
if self.ic_generator is not None:
Expand Down Expand Up @@ -467,7 +471,8 @@ def optimize_acqf(
is set to 1, which will be done automatically if not specified in
`options`.
fixed_features: A map `{feature_index: value}` for features that
should be fixed to a particular value during generation.
should be fixed to a particular value during generation. All indices
should be non-negative.
post_processing_func: A function that post-processes an optimization
result appropriately (i.e., according to `round-trip`
transformations).
Expand Down Expand Up @@ -610,7 +615,8 @@ def optimize_acqf_cyclic(
with each tuple encoding an inequality constraint of the form
`\sum_i (X[indices[i]] * coefficients[i]) >= rhs`
fixed_features: A map `{feature_index: value}` for features that
should be fixed to a particular value during generation.
should be fixed to a particular value during generation. All indices
should be non-negative.
post_processing_func: A function that post-processes an optimization
result appropriately (i.e., according to `round-trip`
transformations).
Expand Down Expand Up @@ -758,11 +764,13 @@ def optimize_acqf_list(
Using non-linear inequality constraints also requires that `batch_limit`
is set to 1, which will be done automatically if not specified in
`options`.
fixed_features: A map `{feature_index: value}` for features that
should be fixed to a particular value during generation.
fixed_features: A map `{feature_index: value}` for features that should
be fixed to a particular value during generation. All indices
(`feature_index`) should be non-negative.
fixed_features_list: A list of maps `{feature_index: value}`. The i-th
item represents the fixed_feature for the i-th optimization. If
`fixed_features_list` is provided, `optimize_acqf_mixed` is invoked.
All indices (`feature_index`) should be non-negative.
post_processing_func: A function that post-processes an optimization
result appropriately (i.e., according to `round-trip`
transformations).
Expand Down Expand Up @@ -872,7 +880,8 @@ def optimize_acqf_mixed(
raw_samples: Number of samples for initialization. This is required
if `batch_initial_conditions` is not specified.
fixed_features_list: A list of maps `{feature_index: value}`. The i-th
item represents the fixed_feature for the i-th optimization.
item represents the fixed_feature for the i-th optimization. All
indices (`feature_index`) should be non-negative.
options: Options for candidate generation.
inequality constraints: A list of tuples (indices, coefficients, rhs),
with each tuple encoding an inequality constraint of the form
Expand Down
42 changes: 41 additions & 1 deletion test/optim/test_optimize.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@

import itertools
import warnings
from functools import partial
from itertools import product
from typing import Any
from unittest import mock
Expand Down Expand Up @@ -1119,6 +1120,45 @@ def __call__(self, x, f):
self.assertEqual(f_np_wrapper.call_count, 2)


class TestAllOptimizers(BotorchTestCase):
    def test_raises_with_negative_fixed_features(self) -> None:
        """Every optimizer entry point must reject negative fixed-feature indices.

        Each case supplies a `fixed_features` (or `fixed_features_list`) mapping
        whose key is -1; the optimizer is expected to raise a ValueError before
        doing any real work.
        """
        # Arguments shared by every optimizer invocation below.
        shared_kwargs = {
            "bounds": torch.tensor(
                [[0.0, 0.0], [1.0, 1.0]], device=self.device
            ),
            "num_restarts": 4,
            "raw_samples": 16,
        }

        # (name, optimizer, case-specific kwargs) triples, one per entry point.
        cases = [
            (
                "optimize_acqf",
                optimize_acqf,
                {
                    "acq_function": MockAcquisitionFunction(),
                    "fixed_features": {-1: 0.0},
                    "q": 1,
                },
            ),
            (
                "optimize_acqf_cyclic",
                optimize_acqf_cyclic,
                {
                    "acq_function": MockAcquisitionFunction(),
                    "fixed_features": {-1: 0.0},
                    "q": 1,
                },
            ),
            (
                "optimize_acqf_mixed",
                optimize_acqf_mixed,
                {
                    "acq_function": MockAcquisitionFunction(),
                    "fixed_features_list": [{-1: 0.0}],
                    "q": 1,
                },
            ),
            (
                "optimize_acqf_list",
                optimize_acqf_list,
                {
                    "acq_function_list": [MockAcquisitionFunction()],
                    "fixed_features": {-1: 0.0},
                },
            ),
        ]

        for name, optimizer, case_kwargs in cases:
            with self.subTest(name), self.assertRaisesRegex(
                ValueError, "must be >= 0."
            ):
                optimizer(**case_kwargs, **shared_kwargs)


class TestOptimizeAcqfCyclic(BotorchTestCase):
@mock.patch("botorch.optim.optimize._optimize_acqf") # noqa: C901
# TODO: make sure this runs without mock
Expand Down Expand Up @@ -1171,7 +1211,7 @@ def test_optimize_acqf_cyclic(self, mock_optimize_acqf):
"set_X_pending",
wraps=mock_acq_function.set_X_pending,
) as mock_set_X_pending:
candidates, acq_value = optimize_acqf_cyclic(
candidates, _ = optimize_acqf_cyclic(
acq_function=mock_acq_function,
bounds=bounds,
q=q,
Expand Down
Loading