Skip to content

Commit 86687c4

Browse files
saitcakmak authored and facebook-github-bot committed
Clean up some lints
Summary: Trying out a new tool and cleaning up some linter issues it flagged.

Reviewed By: esantorella

Differential Revision: D65274533
1 parent 66660e3 commit 86687c4

File tree

11 files changed

+19
-6
lines changed

11 files changed

+19
-6
lines changed

botorch/acquisition/acquisition.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -53,6 +53,7 @@ def set_X_pending(self, X_pending: Tensor | None = None) -> None:
5353
"Pending points require a gradient but the acquisition function"
5454
" will not provide a gradient to these points.",
5555
BotorchWarning,
56+
stacklevel=2,
5657
)
5758
self.X_pending = X_pending.detach().clone()
5859
else:

botorch/acquisition/cached_cholesky.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -87,6 +87,7 @@ def __init__(
8787
warnings.warn(
8888
_get_cache_root_not_supported_message(type(model)),
8989
RuntimeWarning,
90+
stacklevel=3,
9091
)
9192
cache_root = False
9293
self._cache_root = cache_root
@@ -150,6 +151,7 @@ def _get_f_X_samples(self, posterior: GPyTorchPosterior, q_in: int) -> Tensor:
150151
"ill-conditioned covariance matrix. "
151152
"Falling back to standard sampling.",
152153
BotorchWarning,
154+
stacklevel=3,
153155
)
154156

155157
# TODO: improve efficiency for multi-task models

botorch/acquisition/cost_aware.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -209,6 +209,7 @@ def forward(
209209
warnings.warn(
210210
"Encountered negative cost values in InverseCostWeightedUtility",
211211
CostAwareWarning,
212+
stacklevel=2,
212213
)
213214
# clamp (away from zero) and sum cost across elements of the q-batch -
214215
# this will be of shape `num_fantasies x batch_shape` or `batch_shape`

botorch/acquisition/decoupled.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -110,6 +110,7 @@ def set_X_pending(
110110
"Pending points require a gradient but the acquisition function"
111111
" will not provide a gradient to these points.",
112112
BotorchWarning,
113+
stacklevel=2,
113114
)
114115
self.X_pending = X_pending.detach().clone()
115116
if X_pending_evaluation_mask is not None:

botorch/acquisition/multi_objective/utils.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,11 @@ def get_default_partitioning_alpha(num_objectives: int) -> float:
6161
if num_objectives <= 4:
6262
return 0.0
6363
elif num_objectives > 6:
64-
warnings.warn("EHVI works best for less than 7 objectives.", BotorchWarning)
64+
warnings.warn(
65+
"EHVI works best for less than 7 objectives.",
66+
BotorchWarning,
67+
stacklevel=3,
68+
)
6569
return 10 ** (-8 + num_objectives)
6670

6771

botorch/acquisition/multi_step_lookahead.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -552,6 +552,7 @@ def _construct_inner_samplers(
552552
warnings.warn(
553553
"inner_mc_samples is ignored for analytic acquisition functions",
554554
BotorchWarning,
555+
stacklevel=3,
555556
)
556557
inner_samplers.append(None)
557558
else:

botorch/models/model.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -236,6 +236,7 @@ def _set_transformed_inputs(self) -> None:
236236
"attribute. Make sure that the `input_transform` is applied to "
237237
"both the train inputs and test inputs.",
238238
RuntimeWarning,
239+
stacklevel=3,
239240
)
240241

241242
def _revert_to_original_inputs(self) -> None:

botorch/utils/multi_objective/hypervolume.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -789,6 +789,7 @@ def set_X_pending(self, X_pending: Tensor | None = None) -> None:
789789
"Pending points require a gradient but the acquisition function"
790790
" will not provide a gradient to these points.",
791791
BotorchWarning,
792+
stacklevel=2,
792793
)
793794
X_pending = X_pending.detach().clone()
794795
if self.cache_pending:

botorch/utils/transforms.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -190,6 +190,7 @@ def _verify_output_shape(acqf: Any, X: Tensor, output: Tensor) -> bool:
190190
f"of X, but got output with shape {output.shape} for X with shape "
191191
f"{X.shape}. Make sure that this is the intended behavior!",
192192
RuntimeWarning,
193+
stacklevel=3,
193194
)
194195
return True
195196

test/exceptions/test_warnings.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -43,7 +43,7 @@ def test_botorch_warnings(self):
4343
UserInputWarning,
4444
):
4545
with warnings.catch_warnings(record=True) as ws, settings.debug(True):
46-
warnings.warn("message", WarningClass)
46+
warnings.warn("message", WarningClass, stacklevel=1)
4747
self.assertEqual(len(ws), 1)
4848
self.assertTrue(issubclass(ws[-1].category, WarningClass))
4949
self.assertTrue("message" in str(ws[-1].message))

0 commit comments

Comments (0)