Skip to content

Commit

Permalink
Remove extraneous current_value computation from input constructor …
Browse files Browse the repository at this point in the history
…for `construct_inputs_qKG` (#2520)

Summary:
Pull Request resolved: #2520

The input constructor `construct_inputs_qKG` previously computed `current_value` which is extraneous (it is merely an additive constant in the acquisition function).

However, `construct_inputs_qMFKG`, which *does* require an explicit `current_value` (in the cost-aware setting), was piggy-backing off `construct_inputs_qKG`, which I suspect is why this persisted.

Having said that, `construct_inputs_qKG` computes `current_value` incorrectly for the purposes of `construct_inputs_qMFKG` anyway (as it failed to take the fidelity input dimension into consideration), so it wasn't actually useful or beneficial under any circumstances.

With D62391106, which calculates `current_value` correctly for `construct_inputs_qMFKG`, we can safely remove this computation altogether from `construct_inputs_qKG`.

Reviewed By: Balandat

Differential Revision: D62415444
  • Loading branch information
ltiao authored and facebook-github-bot committed Sep 10, 2024
1 parent 167dcb7 commit 8e23e85
Show file tree
Hide file tree
Showing 2 changed files with 44 additions and 24 deletions.
33 changes: 19 additions & 14 deletions botorch/acquisition/input_constructors.py
Original file line number Diff line number Diff line change
Expand Up @@ -1240,30 +1240,35 @@ def construct_inputs_qKG(
objective: Optional[MCAcquisitionObjective] = None,
posterior_transform: Optional[PosteriorTransform] = None,
num_fantasies: int = 64,
with_current_value: bool = False,
**optimize_objective_kwargs: TOptimizeObjectiveKwargs,
) -> dict[str, Any]:
r"""Construct kwargs for `qKnowledgeGradient` constructor."""

X = _get_dataset_field(training_data, "X", first_only=True)
_bounds = torch.as_tensor(bounds, dtype=X.dtype, device=X.device)

_, current_value = optimize_objective(
model=model,
bounds=_bounds.t(),
q=1,
objective=objective,
posterior_transform=posterior_transform,
**optimize_objective_kwargs,
)

return {
inputs_qkg = {
"model": model,
"objective": objective,
"posterior_transform": posterior_transform,
"num_fantasies": num_fantasies,
"current_value": current_value.detach().cpu().max(),
}

if with_current_value:

X = _get_dataset_field(training_data, "X", first_only=True)
_bounds = torch.as_tensor(bounds, dtype=X.dtype, device=X.device)

_, current_value = optimize_objective(
model=model,
bounds=_bounds.t(),
q=1,
objective=objective,
posterior_transform=posterior_transform,
**optimize_objective_kwargs,
)
inputs_qkg["current_value"] = current_value.detach().cpu().max()

return inputs_qkg


@acqf_input_constructor(qMultiFidelityKnowledgeGradient)
def construct_inputs_qMFKG(
Expand Down
35 changes: 25 additions & 10 deletions test/acquisition/test_input_constructors.py
Original file line number Diff line number Diff line change
Expand Up @@ -1257,24 +1257,39 @@ def test_construct_inputs_qLogNParEGO(self) -> None:

class TestKGandESAcquisitionFunctionInputConstructors(InputConstructorBaseTestCase):
def test_construct_inputs_kg(self) -> None:
current_value = torch.tensor(1.23)
with mock.patch(
target="botorch.acquisition.input_constructors.optimize_objective",
return_value=(None, current_value),
):
from botorch.acquisition import input_constructors
func = get_acqf_input_constructor(qKnowledgeGradient)

func = input_constructors.get_acqf_input_constructor(qKnowledgeGradient)
with self.subTest("test_with_current_value"):

current_value = torch.tensor(1.23)

with mock.patch(
target="botorch.acquisition.input_constructors.optimize_objective",
return_value=(None, current_value),
):

kwargs = func(
model=mock.Mock(),
training_data=self.blockX_blockY,
objective=LinearMCObjective(torch.rand(2)),
bounds=self.bounds,
num_fantasies=33,
with_current_value=True,
)

self.assertEqual(kwargs["num_fantasies"], 33)
self.assertEqual(kwargs["current_value"], current_value)

with self.subTest("test_without_current_value"):
kwargs = func(
model=mock.Mock(),
training_data=self.blockX_blockY,
objective=LinearMCObjective(torch.rand(2)),
bounds=self.bounds,
num_fantasies=33,
with_current_value=False,
)

self.assertEqual(kwargs["num_fantasies"], 33)
self.assertEqual(kwargs["current_value"], current_value)
self.assertNotIn("current_value", kwargs)

def test_construct_inputs_mes(self) -> None:
func = get_acqf_input_constructor(qMaxValueEntropy)
Expand Down

0 comments on commit 8e23e85

Please sign in to comment.