Clean up some lints
Summary: Trying out a new tool and cleaning up some linter issues it flagged.
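
For context on what the lint fix does: every change in this diff adds an explicit `stacklevel` argument to a `warnings.warn` call. `stacklevel` tells the warnings machinery how many frames up the call stack to attribute the warning to, so the reported file and line can point at the user's call site rather than at library internals. A minimal sketch of the standard-library behavior (the function names here are hypothetical, for illustration only):

```python
import warnings

def library_helper():
    # stacklevel=1 (the default) would attribute this warning to the line
    # below, inside the library; stacklevel=2 attributes it to the caller.
    warnings.warn("something looks off", UserWarning, stacklevel=2)

def user_code():
    library_helper()  # with stacklevel=2, the warning report points here

user_code()
```

The `stacklevel=2` sites below warn directly from the method the user called; the `stacklevel=3` sites warn from one frame deeper, so one extra hop is needed to reach user code.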

Reviewed By: esantorella

Differential Revision: D65274533
saitcakmak authored and facebook-github-bot committed Oct 31, 2024
1 parent 66660e3 commit 86687c4
Showing 11 changed files with 19 additions and 6 deletions.
1 change: 1 addition & 0 deletions botorch/acquisition/acquisition.py
@@ -53,6 +53,7 @@ def set_X_pending(self, X_pending: Tensor | None = None) -> None:
"Pending points require a gradient but the acquisition function"
" will not provide a gradient to these points.",
BotorchWarning,
+ stacklevel=2,
)
self.X_pending = X_pending.detach().clone()
else:
2 changes: 2 additions & 0 deletions botorch/acquisition/cached_cholesky.py
@@ -87,6 +87,7 @@ def __init__(
warnings.warn(
_get_cache_root_not_supported_message(type(model)),
RuntimeWarning,
+ stacklevel=3,
)
cache_root = False
self._cache_root = cache_root
@@ -150,6 +151,7 @@ def _get_f_X_samples(self, posterior: GPyTorchPosterior, q_in: int) -> Tensor:
"ill-conditioned covariance matrix. "
"Falling back to standard sampling.",
BotorchWarning,
+ stacklevel=3,
)

# TODO: improve efficiency for multi-task models
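The `stacklevel=3` in this file (versus `stacklevel=2` elsewhere) reflects how deep the warn call sits: the mixin's `__init__` is normally reached via a subclass's `__init__`, so the user's constructor call is two frames above the warn. A sketch of that pattern, with hypothetical class names:

```python
import warnings

class _CacheMixin:
    def __init__(self):
        # Two frames separate this warn from user code (_CacheMixin.__init__
        # and Sub.__init__), so stacklevel=3 lands on the user's Sub() call.
        warnings.warn("cache_root is not supported", RuntimeWarning, stacklevel=3)

class Sub(_CacheMixin):
    def __init__(self):
        super().__init__()

Sub()  # the warning is attributed to this line
```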
1 change: 1 addition & 0 deletions botorch/acquisition/cost_aware.py
@@ -209,6 +209,7 @@ def forward(
warnings.warn(
"Encountered negative cost values in InverseCostWeightedUtility",
CostAwareWarning,
+ stacklevel=2,
)
# clamp (away from zero) and sum cost across elements of the q-batch -
# this will be of shape `num_fantasies x batch_shape` or `batch_shape`
1 change: 1 addition & 0 deletions botorch/acquisition/decoupled.py
@@ -110,6 +110,7 @@ def set_X_pending(
"Pending points require a gradient but the acquisition function"
" will not provide a gradient to these points.",
BotorchWarning,
+ stacklevel=2,
)
self.X_pending = X_pending.detach().clone()
if X_pending_evaluation_mask is not None:
6 changes: 5 additions & 1 deletion botorch/acquisition/multi_objective/utils.py
@@ -61,7 +61,11 @@ def get_default_partitioning_alpha(num_objectives: int) -> float:
if num_objectives <= 4:
return 0.0
elif num_objectives > 6:
warnings.warn("EHVI works best for less than 7 objectives.", BotorchWarning)
+ warnings.warn(
+     "EHVI works best for less than 7 objectives.",
+     BotorchWarning,
+     stacklevel=3,
+ )
return 10 ** (-8 + num_objectives)


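As a worked example of the function being edited: per the body shown above, `get_default_partitioning_alpha` returns `0.0` for up to 4 objectives and `10 ** (-8 + num_objectives)` otherwise, warning once the count exceeds 6, so the approximation parameter grows tenfold with each extra objective:

```python
# Values implied by the function body above:
#   num_objectives = 5 -> 10**(-3) = 0.001
#   num_objectives = 6 -> 10**(-2) = 0.01
#   num_objectives = 7 -> 10**(-1) = 0.1  (also emits the BotorchWarning)
for m in range(5, 8):
    print(m, 10 ** (-8 + m))
```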
1 change: 1 addition & 0 deletions botorch/acquisition/multi_step_lookahead.py
@@ -552,6 +552,7 @@ def _construct_inner_samplers(
warnings.warn(
"inner_mc_samples is ignored for analytic acquisition functions",
BotorchWarning,
+ stacklevel=3,
)
inner_samplers.append(None)
else:
1 change: 1 addition & 0 deletions botorch/models/model.py
@@ -236,6 +236,7 @@ def _set_transformed_inputs(self) -> None:
"attribute. Make sure that the `input_transform` is applied to "
"both the train inputs and test inputs.",
RuntimeWarning,
+ stacklevel=3,
)

def _revert_to_original_inputs(self) -> None:
1 change: 1 addition & 0 deletions botorch/utils/multi_objective/hypervolume.py
@@ -789,6 +789,7 @@ def set_X_pending(self, X_pending: Tensor | None = None) -> None:
"Pending points require a gradient but the acquisition function"
" will not provide a gradient to these points.",
BotorchWarning,
+ stacklevel=2,
)
X_pending = X_pending.detach().clone()
if self.cache_pending:
1 change: 1 addition & 0 deletions botorch/utils/transforms.py
@@ -190,6 +190,7 @@ def _verify_output_shape(acqf: Any, X: Tensor, output: Tensor) -> bool:
f"of X, but got output with shape {output.shape} for X with shape "
f"{X.shape}. Make sure that this is the intended behavior!",
RuntimeWarning,
+ stacklevel=3,
)
return True

2 changes: 1 addition & 1 deletion test/exceptions/test_warnings.py
@@ -43,7 +43,7 @@ def test_botorch_warnings(self):
UserInputWarning,
):
with warnings.catch_warnings(record=True) as ws, settings.debug(True):
warnings.warn("message", WarningClass)
warnings.warn("message", WarningClass, stacklevel=1)
self.assertEqual(len(ws), 1)
self.assertTrue(issubclass(ws[-1].category, WarningClass))
self.assertTrue("message" in str(ws[-1].message))
8 changes: 4 additions & 4 deletions test/test_settings.py
@@ -31,12 +31,12 @@ def test_debug(self):
with settings.debug(False):
with warnings.catch_warnings(record=True) as ws:
if settings.debug.on():
warnings.warn("test", BotorchWarning)
warnings.warn("test", BotorchWarning, stacklevel=1)
self.assertEqual(len(ws), 0)
# Check that warnings are not suppressed outside of context manager.
with warnings.catch_warnings(record=True) as ws:
if settings.debug.on():
warnings.warn("test", BotorchWarning)
warnings.warn("test", BotorchWarning, stacklevel=1)
self.assertEqual(len(ws), 1)

# Turn off debug.
@@ -45,12 +45,12 @@ def test_debug(self):
with settings.debug(True):
with warnings.catch_warnings(record=True) as ws:
if settings.debug.on():
warnings.warn("test", BotorchWarning)
warnings.warn("test", BotorchWarning, stacklevel=1)
self.assertEqual(len(ws), 1)
# Check that warnings are suppressed outside of context manager.
with warnings.catch_warnings(record=True) as ws:
if settings.debug.on():
warnings.warn("test", BotorchWarning)
warnings.warn("test", BotorchWarning, stacklevel=1)
self.assertEqual(len(ws), 0)


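The `test_settings.py` changes are the same mechanical substitution; the test itself exercises a guard pattern in which warnings are emitted only while a debug flag is on. A stripped-down sketch of that pattern, where the `DEBUG` flag is a stand-in for BoTorch's `settings.debug.on()`:

```python
import warnings

DEBUG = True  # stand-in for settings.debug.on()

def maybe_warn(message: str) -> None:
    if DEBUG:
        # Attribute the warning to whoever called maybe_warn.
        warnings.warn(message, UserWarning, stacklevel=2)

with warnings.catch_warnings(record=True) as ws:
    warnings.simplefilter("always")
    maybe_warn("test")

assert len(ws) == 1
```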
