Skip to content

Commit

Permalink
Bug: Enable SaasPyroModel to sample from prior (#2465)
Browse files Browse the repository at this point in the history
Summary:
Pull Request resolved: #2465

With no observations, FBGPs should still be able to fit the model by sampling from the prior. This solution allows that to happen through fit_fully_bayesian_model_nuts.

Note that this is much slower than simply sampling from the individual priors (NUTS sampling vs. an almost-instant direct draw), but it keeps a consistent interface.

Reviewed By: saitcakmak

Differential Revision: D61145689

fbshipit-source-id: 042f1626d20abdf49c60e2ec8396d106f43654f7
  • Loading branch information
Carl Hvarfner authored and facebook-github-bot committed Aug 13, 2024
1 parent 3f5fcd6 commit a7be6cb
Show file tree
Hide file tree
Showing 2 changed files with 19 additions and 8 deletions.
17 changes: 9 additions & 8 deletions botorch/models/fully_bayesian.py
Original file line number Diff line number Diff line change
Expand Up @@ -178,14 +178,15 @@ def sample(self) -> None:
lengthscale = self.sample_lengthscale(dim=self.ard_num_dims, **tkwargs)
K = matern52_kernel(X=self.train_X, lengthscale=lengthscale)
K = outputscale * K + noise * torch.eye(self.train_X.shape[0], **tkwargs)
pyro.sample(
"Y",
pyro.distributions.MultivariateNormal(
loc=mean.view(-1).expand(self.train_X.shape[0]),
covariance_matrix=K,
),
obs=self.train_Y.squeeze(-1),
)
if self.train_Y.shape[-2] > 0:
pyro.sample(
"Y",
pyro.distributions.MultivariateNormal(
loc=mean.view(-1).expand(self.train_X.shape[0]),
covariance_matrix=K,
),
obs=self.train_Y.squeeze(-1),
)

def sample_outputscale(
self, concentration: float = 2.0, rate: float = 0.15, **tkwargs: Any
Expand Down
10 changes: 10 additions & 0 deletions test/models/test_fully_bayesian.py
Original file line number Diff line number Diff line change
Expand Up @@ -387,6 +387,16 @@ def test_fit_model(self):
self.assertIsNone(model.covar_module)
self.assertIsNone(model.likelihood)

def test_empty(self):
    """Fitting with zero observations should fall back to sampling from the prior.

    Builds a SAAS fully Bayesian GP on empty (0-row) training tensors, runs a
    short NUTS fit, and checks that the expected number of posterior samples
    (num_samples / thinning = 2) was drawn for the outputscale hyperparameter.
    """
    empty_X = torch.rand(0, 3)
    empty_Y = torch.rand(0, 1)
    model = SaasFullyBayesianSingleTaskGP(train_X=empty_X, train_Y=empty_Y)
    fit_fully_bayesian_model_nuts(
        model,
        warmup_steps=2,
        num_samples=6,
        thinning=3,
        disable_progbar=True,
    )
    expected_shape = torch.Size([2])
    self.assertEqual(model.covar_module.outputscale.shape, expected_shape)

def test_transforms(self):
for infer_noise in [True, False]:
tkwargs = {"device": self.device, "dtype": torch.double}
Expand Down

0 comments on commit a7be6cb

Please sign in to comment.