Skip to content

Commit

Permalink
Add input constructor for qMultiFidelityHypervolumeKnowledgeGradient
Browse files Browse the repository at this point in the history
Summary: Adds new input constructors for qMultiFidelityHypervolumeKnowledgeGradient.

Differential Revision: D62459735
  • Loading branch information
ltiao authored and facebook-github-bot committed Sep 10, 2024
1 parent 1dd2ffc commit 7d01b5e
Show file tree
Hide file tree
Showing 2 changed files with 93 additions and 0 deletions.
48 changes: 48 additions & 0 deletions botorch/acquisition/input_constructors.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,7 @@
from botorch.acquisition.multi_objective.hypervolume_knowledge_gradient import (
_get_hv_value_function,
qHypervolumeKnowledgeGradient,
qMultiFidelityHypervolumeKnowledgeGradient,
)
from botorch.acquisition.multi_objective.logei import (
qLogExpectedHypervolumeImprovement,
Expand Down Expand Up @@ -1370,6 +1371,53 @@ def construct_inputs_qMFKG(
}


@acqf_input_constructor(qMultiFidelityHypervolumeKnowledgeGradient)
def construct_inputs_qMFHVKG(
    model: Model,
    training_data: MaybeDict[SupervisedDataset],
    bounds: list[tuple[float, float]],
    target_fidelities: dict[int, Union[int, float]],
    objective_thresholds: Tensor,
    objective: Optional[MCMultiOutputObjective] = None,
    posterior_transform: Optional[PosteriorTransform] = None,
    fidelity_weights: Optional[dict[int, float]] = None,
    cost_intercept: float = 1.0,
    num_trace_observations: int = 0,
    num_fantasies: int = 8,
    num_pareto: int = 10,
) -> dict[str, Any]:
    r"""Construct kwargs for the `qMultiFidelityHypervolumeKnowledgeGradient`
    constructor.

    Combines the multi-fidelity base inputs (cost model / project callable)
    with the hypervolume knowledge gradient inputs.

    Args:
        model: The model to be used in the acquisition function.
        training_data: Dataset(s) used to construct the HVKG inputs.
        bounds: A list of (lower, upper) tuples for each column of `X`.
        target_fidelities: A mapping from column index to the target fidelity
            value at which candidates are ultimately evaluated.
        objective_thresholds: A tensor of objective thresholds (used as the
            hypervolume reference point).
        objective: The MC multi-output objective to be used.
        posterior_transform: The posterior transform to be used.
        fidelity_weights: Optional mapping from fidelity column index to cost
            weight, forwarded to the cost model.
        cost_intercept: The fixed cost of an evaluation.
        num_trace_observations: Number of trace observations per evaluation.
            Must be 0; trace observations are not supported by
            `qMultiFidelityHypervolumeKnowledgeGradient`.
        num_fantasies: The number of fantasy points to use.
        num_pareto: The number of Pareto-optimal points to use.

    Returns:
        A dict of kwargs for `qMultiFidelityHypervolumeKnowledgeGradient`.

    Raises:
        NotImplementedError: If `num_trace_observations > 0`.
    """
    # Validate up front, before doing any construction work.
    if num_trace_observations > 0:
        raise NotImplementedError(
            "`qMultiFidelityHypervolumeKnowledgeGradient` does not support "
            "trace observations."
        )

    inputs_mf = construct_inputs_mf_base(
        target_fidelities=target_fidelities,
        fidelity_weights=fidelity_weights,
        cost_intercept=cost_intercept,
        num_trace_observations=num_trace_observations,
    )

    # `qMultiFidelityHypervolumeKnowledgeGradient` does not accept an
    # `expand` argument, so drop it from the multi-fidelity base inputs.
    del inputs_mf["expand"]

    inputs_hvkg = construct_inputs_qHVKG(
        model=model,
        training_data=training_data,
        bounds=bounds,
        objective_thresholds=objective_thresholds,
        objective=objective,
        posterior_transform=posterior_transform,
        num_fantasies=num_fantasies,
        num_pareto=num_pareto,
    )

    return {**inputs_mf, **inputs_hvkg, "target_fidelities": target_fidelities}


@acqf_input_constructor(qMultiFidelityMaxValueEntropy)
def construct_inputs_qMFMES(
model: Model,
Expand Down
45 changes: 45 additions & 0 deletions test/acquisition/test_input_constructors.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@
)
from botorch.acquisition.multi_objective.hypervolume_knowledge_gradient import (
qHypervolumeKnowledgeGradient,
qMultiFidelityHypervolumeKnowledgeGradient,
)

from botorch.acquisition.multi_objective.logei import (
Expand Down Expand Up @@ -1454,6 +1455,36 @@ def test_construct_inputs_mfkg(self) -> None:
)
self.assertEqual(inputs_mfkg["current_value"], current_value)

def test_construct_inputs_mfhvkg(self) -> None:
    expected_current_value = torch.tensor(1.23)
    thresholds = torch.rand(2)

    # Patch out the inner optimization so the constructor returns our
    # canned current value instead of actually optimizing.
    patched_optimize = mock.patch(
        target="botorch.acquisition.input_constructors.optimize_objective",
        return_value=(None, expected_current_value),
    )
    with patched_optimize:
        input_constructor = get_acqf_input_constructor(
            qMultiFidelityHypervolumeKnowledgeGradient
        )
        constructed = input_constructor(
            model=mock.Mock(),
            training_data=self.blockX_blockY,
            objective_thresholds=thresholds,
            bounds=self.bounds,
            num_fantasies=33,
            num_pareto=11,
            target_fidelities={0: 0.987},
            fidelity_weights={0: 0.654},
            cost_intercept=0.321,
        )

        # Sampling sizes pass through, the mocked current value surfaces,
        # and the thresholds become the hypervolume reference point.
        self.assertEqual(constructed["num_fantasies"], 33)
        self.assertEqual(constructed["num_pareto"], 11)
        self.assertEqual(constructed["current_value"], expected_current_value)
        self.assertTrue(torch.equal(constructed["ref_point"], thresholds))

def test_construct_inputs_mfmes(self) -> None:
target_fidelities = {0: 0.987}
constructor_args = {
Expand Down Expand Up @@ -1598,6 +1629,7 @@ def setUp(self, suppress_input_warnings: bool = True) -> None:
"bounds": bounds,
},
)

self.cases["MF look-ahead"] = (
[qMultiFidelityMaxValueEntropy],
{
Expand Down Expand Up @@ -1657,6 +1689,18 @@ def setUp(self, suppress_input_warnings: bool = True) -> None:
"objective_thresholds": objective_thresholds,
},
)
self.cases["MF HV Look-ahead"] = (
[qMultiFidelityHypervolumeKnowledgeGradient],
{
"model": mock.Mock(),
"training_data": self.blockX_blockY,
"bounds": bounds,
"target_fidelities": {0: 0.987},
"num_fantasies": 30,
"objective_thresholds": objective_thresholds,
},
)

pref_model = self.mock_model
pref_model.dim = 2
pref_model.datapoints = torch.tensor([])
Expand Down Expand Up @@ -1713,6 +1757,7 @@ def test_all_cases_covered(self) -> None:
all_classes_tested = reduce(
lambda x, y: x + y, [cls_list for cls_list, _ in self.cases.values()]
)

for acqf_cls in ACQF_INPUT_CONSTRUCTOR_REGISTRY.keys():
with self.subTest(acqf_cls=acqf_cls):
self.assertIn(acqf_cls, all_classes_tested)

0 comments on commit 7d01b5e

Please sign in to comment.