diff --git a/botorch/acquisition/input_constructors.py b/botorch/acquisition/input_constructors.py index 4000e5ddcf..8cf47f16f4 100644 --- a/botorch/acquisition/input_constructors.py +++ b/botorch/acquisition/input_constructors.py @@ -65,6 +65,7 @@ from botorch.acquisition.multi_objective.hypervolume_knowledge_gradient import ( _get_hv_value_function, qHypervolumeKnowledgeGradient, + qMultiFidelityHypervolumeKnowledgeGradient, ) from botorch.acquisition.multi_objective.logei import ( qLogExpectedHypervolumeImprovement, @@ -1370,6 +1371,53 @@ def construct_inputs_qMFKG( } +@acqf_input_constructor(qMultiFidelityHypervolumeKnowledgeGradient) +def construct_inputs_qMFHVKG( + model: Model, + training_data: MaybeDict[SupervisedDataset], + bounds: list[tuple[float, float]], + target_fidelities: dict[int, Union[int, float]], + objective_thresholds: Tensor, + objective: Optional[MCMultiOutputObjective] = None, + posterior_transform: Optional[PosteriorTransform] = None, + fidelity_weights: Optional[dict[int, float]] = None, + cost_intercept: float = 1.0, + num_trace_observations: int = 0, + num_fantasies: int = 8, + num_pareto: int = 10, +) -> dict[str, Any]: + r"""Construct kwargs for `qMultiFidelityHypervolumeKnowledgeGradient` constructor.""" + + inputs_mf = construct_inputs_mf_base( + target_fidelities=target_fidelities, + fidelity_weights=fidelity_weights, + cost_intercept=cost_intercept, + num_trace_observations=num_trace_observations, + ) + + if num_trace_observations > 0: + raise NotImplementedError( + "`qMultiFidelityHypervolumeKnowledgeGradient` does not support " + "trace observations." 
+ ) + + # `qMultiFidelityHypervolumeKnowledgeGradient` does not support `expand` argument + del inputs_mf["expand"] + + inputs_hvkg = construct_inputs_qHVKG( + model=model, + training_data=training_data, + bounds=bounds, + objective_thresholds=objective_thresholds, + objective=objective, + posterior_transform=posterior_transform, + num_fantasies=num_fantasies, + num_pareto=num_pareto, + ) + + return {**inputs_mf, **inputs_hvkg, "target_fidelities": target_fidelities} + + @acqf_input_constructor(qMultiFidelityMaxValueEntropy) def construct_inputs_qMFMES( model: Model, diff --git a/test/acquisition/test_input_constructors.py b/test/acquisition/test_input_constructors.py index 9cf925f516..651bfc5453 100644 --- a/test/acquisition/test_input_constructors.py +++ b/test/acquisition/test_input_constructors.py @@ -74,6 +74,7 @@ ) from botorch.acquisition.multi_objective.hypervolume_knowledge_gradient import ( qHypervolumeKnowledgeGradient, + qMultiFidelityHypervolumeKnowledgeGradient, ) from botorch.acquisition.multi_objective.logei import ( @@ -1454,6 +1455,36 @@ def test_construct_inputs_mfkg(self) -> None: ) self.assertEqual(inputs_mfkg["current_value"], current_value) + def test_construct_inputs_mfhvkg(self) -> None: + + current_value = torch.tensor(1.23) + objective_thresholds = torch.rand(2) + + with mock.patch( + target="botorch.acquisition.input_constructors.optimize_objective", + return_value=(None, current_value), + ): + + get_kwargs = get_acqf_input_constructor( + qMultiFidelityHypervolumeKnowledgeGradient + ) + kwargs = get_kwargs( + model=mock.Mock(), + training_data=self.blockX_blockY, + objective_thresholds=objective_thresholds, + bounds=self.bounds, + num_fantasies=33, + num_pareto=11, + target_fidelities={0: 0.987}, + fidelity_weights={0: 0.654}, + cost_intercept=0.321, + ) + + self.assertEqual(kwargs["num_fantasies"], 33) + self.assertEqual(kwargs["num_pareto"], 11) + self.assertEqual(kwargs["current_value"], current_value) + 
self.assertTrue(torch.equal(kwargs["ref_point"], objective_thresholds)) + def test_construct_inputs_mfmes(self) -> None: target_fidelities = {0: 0.987} constructor_args = { @@ -1598,6 +1629,7 @@ def setUp(self, suppress_input_warnings: bool = True) -> None: "bounds": bounds, }, ) + self.cases["MF look-ahead"] = ( [qMultiFidelityMaxValueEntropy], { @@ -1657,6 +1689,18 @@ def setUp(self, suppress_input_warnings: bool = True) -> None: "objective_thresholds": objective_thresholds, }, ) + self.cases["MF HV Look-ahead"] = ( + [qMultiFidelityHypervolumeKnowledgeGradient], + { + "model": mock.Mock(), + "training_data": self.blockX_blockY, + "bounds": bounds, + "target_fidelities": {0: 0.987}, + "num_fantasies": 30, + "objective_thresholds": objective_thresholds, + }, + ) + pref_model = self.mock_model pref_model.dim = 2 pref_model.datapoints = torch.tensor([]) @@ -1713,6 +1757,7 @@ def test_all_cases_covered(self) -> None: all_classes_tested = reduce( lambda x, y: x + y, [cls_list for cls_list, _ in self.cases.values()] ) + for acqf_cls in ACQF_INPUT_CONSTRUCTOR_REGISTRY.keys(): with self.subTest(acqf_cls=acqf_cls): self.assertIn(acqf_cls, all_classes_tested)