diff --git a/botorch/acquisition/acquisition.py b/botorch/acquisition/acquisition.py
index 60c5558482..34f2016a05 100644
--- a/botorch/acquisition/acquisition.py
+++ b/botorch/acquisition/acquisition.py
@@ -53,6 +53,7 @@ def set_X_pending(self, X_pending: Tensor | None = None) -> None:
                     "Pending points require a gradient but the acquisition function"
                     " will not provide a gradient to these points.",
                     BotorchWarning,
+                    stacklevel=2,
                 )
             self.X_pending = X_pending.detach().clone()
         else:
diff --git a/botorch/acquisition/cached_cholesky.py b/botorch/acquisition/cached_cholesky.py
index ca3794fe6e..d47e2a2c50 100644
--- a/botorch/acquisition/cached_cholesky.py
+++ b/botorch/acquisition/cached_cholesky.py
@@ -87,6 +87,7 @@ def __init__(
             warnings.warn(
                 _get_cache_root_not_supported_message(type(model)),
                 RuntimeWarning,
+                stacklevel=3,
             )
             cache_root = False
         self._cache_root = cache_root
@@ -150,6 +151,7 @@ def _get_f_X_samples(self, posterior: GPyTorchPosterior, q_in: int) -> Tensor:
                     "ill-conditioned covariance matrix. "
                     "Falling back to standard sampling.",
                     BotorchWarning,
+                    stacklevel=3,
                 )

         # TODO: improve efficiency for multi-task models
diff --git a/botorch/acquisition/cost_aware.py b/botorch/acquisition/cost_aware.py
index 65b9e53f5b..da60861b5c 100644
--- a/botorch/acquisition/cost_aware.py
+++ b/botorch/acquisition/cost_aware.py
@@ -209,6 +209,7 @@ def forward(
             warnings.warn(
                 "Encountered negative cost values in InverseCostWeightedUtility",
                 CostAwareWarning,
+                stacklevel=2,
             )
         # clamp (away from zero) and sum cost across elements of the q-batch -
         # this will be of shape `num_fantasies x batch_shape` or `batch_shape`
diff --git a/botorch/acquisition/decoupled.py b/botorch/acquisition/decoupled.py
index 8969bfea6b..8c893b027d 100644
--- a/botorch/acquisition/decoupled.py
+++ b/botorch/acquisition/decoupled.py
@@ -110,6 +110,7 @@ def set_X_pending(
                     "Pending points require a gradient but the acquisition function"
                     " will not provide a gradient to these points.",
                     BotorchWarning,
+                    stacklevel=2,
                 )
             self.X_pending = X_pending.detach().clone()
             if X_pending_evaluation_mask is not None:
diff --git a/botorch/acquisition/multi_objective/utils.py b/botorch/acquisition/multi_objective/utils.py
index 9f1c87808b..cca67ad55e 100644
--- a/botorch/acquisition/multi_objective/utils.py
+++ b/botorch/acquisition/multi_objective/utils.py
@@ -61,7 +61,11 @@ def get_default_partitioning_alpha(num_objectives: int) -> float:
     if num_objectives <= 4:
         return 0.0
     elif num_objectives > 6:
-        warnings.warn("EHVI works best for less than 7 objectives.", BotorchWarning)
+        warnings.warn(
+            "EHVI works best for less than 7 objectives.",
+            BotorchWarning,
+            stacklevel=3,
+        )
     return 10 ** (-8 + num_objectives)
diff --git a/botorch/acquisition/multi_step_lookahead.py b/botorch/acquisition/multi_step_lookahead.py
index 808b767cc9..12056acdbc 100644
--- a/botorch/acquisition/multi_step_lookahead.py
+++ b/botorch/acquisition/multi_step_lookahead.py
@@ -552,6 +552,7 @@ def _construct_inner_samplers(
                 warnings.warn(
                     "inner_mc_samples is ignored for analytic acquisition functions",
                     BotorchWarning,
+                    stacklevel=3,
                 )
             inner_samplers.append(None)
         else:
diff --git a/botorch/models/model.py b/botorch/models/model.py
index 1c36aee1ad..8d87fec021 100644
--- a/botorch/models/model.py
+++ b/botorch/models/model.py
@@ -236,6 +236,7 @@ def _set_transformed_inputs(self) -> None:
                     "attribute. Make sure that the `input_transform` is applied to "
                     "both the train inputs and test inputs.",
                     RuntimeWarning,
+                    stacklevel=3,
                 )

     def _revert_to_original_inputs(self) -> None:
diff --git a/botorch/utils/multi_objective/hypervolume.py b/botorch/utils/multi_objective/hypervolume.py
index 7341a5eca3..f854e9f5d8 100644
--- a/botorch/utils/multi_objective/hypervolume.py
+++ b/botorch/utils/multi_objective/hypervolume.py
@@ -789,6 +789,7 @@ def set_X_pending(self, X_pending: Tensor | None = None) -> None:
                     "Pending points require a gradient but the acquisition function"
                     " will not provide a gradient to these points.",
                     BotorchWarning,
+                    stacklevel=2,
                 )
             X_pending = X_pending.detach().clone()
         if self.cache_pending:
diff --git a/botorch/utils/transforms.py b/botorch/utils/transforms.py
index ad48aa5ffa..01f34c0da4 100644
--- a/botorch/utils/transforms.py
+++ b/botorch/utils/transforms.py
@@ -190,6 +190,7 @@ def _verify_output_shape(acqf: Any, X: Tensor, output: Tensor) -> bool:
             f"of X, but got output with shape {output.shape} for X with shape "
             f"{X.shape}. Make sure that this is the intended behavior!",
             RuntimeWarning,
+            stacklevel=3,
         )
     return True
diff --git a/test/exceptions/test_warnings.py b/test/exceptions/test_warnings.py
index ddf1a7095d..0415404aeb 100644
--- a/test/exceptions/test_warnings.py
+++ b/test/exceptions/test_warnings.py
@@ -43,7 +43,7 @@ def test_botorch_warnings(self):
             UserInputWarning,
         ):
             with warnings.catch_warnings(record=True) as ws, settings.debug(True):
-                warnings.warn("message", WarningClass)
+                warnings.warn("message", WarningClass, stacklevel=1)
             self.assertEqual(len(ws), 1)
             self.assertTrue(issubclass(ws[-1].category, WarningClass))
             self.assertTrue("message" in str(ws[-1].message))
diff --git a/test/test_settings.py b/test/test_settings.py
index 69668ae76e..b1d5ef6d26 100644
--- a/test/test_settings.py
+++ b/test/test_settings.py
@@ -31,12 +31,12 @@ def test_debug(self):
         with settings.debug(False):
             with warnings.catch_warnings(record=True) as ws:
                 if settings.debug.on():
-                    warnings.warn("test", BotorchWarning)
+                    warnings.warn("test", BotorchWarning, stacklevel=1)
         self.assertEqual(len(ws), 0)
         # Check that warnings are not suppressed outside of context manager.
         with warnings.catch_warnings(record=True) as ws:
             if settings.debug.on():
-                warnings.warn("test", BotorchWarning)
+                warnings.warn("test", BotorchWarning, stacklevel=1)
         self.assertEqual(len(ws), 1)

         # Turn off debug.
@@ -45,12 +45,12 @@ def test_debug(self):
         with settings.debug(True):
             with warnings.catch_warnings(record=True) as ws:
                 if settings.debug.on():
-                    warnings.warn("test", BotorchWarning)
+                    warnings.warn("test", BotorchWarning, stacklevel=1)
         self.assertEqual(len(ws), 1)
         # Check that warnings are suppressed outside of context manager.
         with warnings.catch_warnings(record=True) as ws:
             if settings.debug.on():
-                warnings.warn("test", BotorchWarning)
+                warnings.warn("test", BotorchWarning, stacklevel=1)
         self.assertEqual(len(ws), 0)
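Note on the chosen values: per Python's `warnings.warn` semantics, `stacklevel=1` (the default) attributes the warning to the line containing the `warn` call itself, `stacklevel=2` to that function's caller, and `stacklevel=3` one frame further up. So the library code above uses 2 or 3 depending on how deep the `warn` call sits below user code, while the tests pass an explicit `stacklevel=1` to keep the default attribution. A minimal sketch of the behavior, not BoTorch code (the helper names `_validate` and `run` are hypothetical):

import warnings

def _validate(x):
    # stacklevel=2 attributes the warning to _validate's caller,
    # i.e. the call site inside run() below, not this line.
    if x < 0:
        warnings.warn("x is negative", UserWarning, stacklevel=2)

def run():
    _validate(-1)  # the warning is reported at this line

run()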