EXAMPLES: fix broken examples
ColmTalbot committed Dec 8, 2022
1 parent e73045f commit 5ae22fb
Showing 5 changed files with 265 additions and 66 deletions.
2 changes: 1 addition & 1 deletion .gitignore
@@ -13,6 +13,6 @@ MANIFEST
*.dat
*.version
*.ipynb_checkpoints
outdir/*
**/outdir
.idea/*
bilby/_version.py
9 changes: 8 additions & 1 deletion bilby/core/sampler/dynesty.py
@@ -725,7 +725,7 @@ def plot_current_state(self):
def _run_test(self):
import pandas as pd

self.sampler = self.sampler_class(
self.sampler = self.sampler_init(
loglikelihood=self.log_likelihood,
prior_transform=self.prior_transform,
ndim=self.ndim,
@@ -734,7 +734,14 @@ def _run_test(self):
sampler_kwargs = self.sampler_function_kwargs.copy()
sampler_kwargs["maxiter"] = 2

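# when progress printing with tqdm is enabled, open a progress bar for the short test run and close it once sampling finishes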
if self.print_method == "tqdm" and self.kwargs["print_progress"]:
from tqdm.auto import tqdm

self.pbar = tqdm(file=sys.stdout, initial=self.sampler.it)
self.sampler.run_nested(**sampler_kwargs)
if self.pbar is not None:
self.pbar = self.pbar.close()
print("")
N = 100
self.result.samples = pd.DataFrame(self.priors.sample(N))[
self.search_parameter_keys
59 changes: 0 additions & 59 deletions examples/core_examples/conditional_prior.py

This file was deleted.

32 changes: 27 additions & 5 deletions examples/gw_examples/injection_examples/relative_binning.py
@@ -7,6 +7,7 @@
and distance using a uniform in comoving volume prior on luminosity distance
between 100 Mpc and 5 Gpc; the cosmology is Planck15.
"""
from copy import deepcopy

import bilby
import numpy as np
@@ -109,22 +110,33 @@
# Check that the prior does not allow signals that are longer than the data segment
priors.validate_prior(duration, minimum_frequency)

# Set up the fiducial parameters for the relative binning likelihood to be the
# injected parameters. Note that because we sample in chirp mass and mass ratio
# but the injection uses mass_1 and mass_2, we need to convert the mass parameters.
fiducial_parameters = injection_parameters.copy()
m1 = fiducial_parameters.pop("mass_1")
m2 = fiducial_parameters.pop("mass_2")
fiducial_parameters["chirp_mass"] = bilby.gw.conversion.component_masses_to_chirp_mass(
m1, m2
)
fiducial_parameters["mass_ratio"] = m2 / m1

# Initialise the likelihood by passing in the interferometer data (ifos) and
# the waveform generator
likelihood = bilby.gw.likelihood.RelativeBinningGravitationalWaveTransient(
interferometers=ifos,
waveform_generator=waveform_generator,
priors=priors,
distance_marginalization=True,
fiducial_parameters=injection_parameters,
fiducial_parameters=fiducial_parameters,
)

# Run sampler. In this case we're going to use the `dynesty` sampler
# Run sampler. In this case, we're going to use the `nestle` sampler
result = bilby.run_sampler(
likelihood=likelihood,
priors=priors,
sampler="nestle",
npoints=100,
npoints=1000,
injection_parameters=injection_parameters,
outdir=outdir,
label=label,
@@ -158,5 +170,15 @@
)
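# the posterior-averaged weights give the binned vs unbinned Bayes factor printed below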
print(f"Binned vs unbinned log Bayes factor {np.log(np.mean(weights)):.2f}")

# Make a corner plot.
# result.plot_corner()
# Generate a result object with the posterior for the regular (unbinned) likelihood
# using rejection sampling
alt_result = deepcopy(result)
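# accept each posterior sample with probability weight / max(weights)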
keep = weights > np.random.uniform(0, max(weights), len(weights))
alt_result.posterior = result.posterior.iloc[keep]

# Make a comparison corner plot.
bilby.core.result.plot_multiple(
[result, alt_result],
labels=["Binned", "Reweighted"],
filename=f"{outdir}/{label}_corner.png",
)
229 changes: 229 additions & 0 deletions examples/tutorials/conditional_priors.ipynb
@@ -0,0 +1,229 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Conditional prior demonstration\n",
"\n",
"Conditional priors enable inference to be performed with priors that correlate different parameters.\n",
"In this notebook, we demonstrate two uses of this: maintaining a two-dimensional distribution while changing the parameterization to be more efficient for sampling, and enforcing an ordering between parameters.\n",
"\n",
"Many cases where `Conditional` priors are useful can also be expressed with `Constraint` priors, however the conditional approach can improve sampling efficiency."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"import pandas as pd\n",
"from bilby.core.prior import (\n",
" Prior, PriorDict, ConditionalPriorDict,\n",
" Uniform, ConditionalUniform, Constraint, \n",
")\n",
"from corner import corner\n",
"from scipy.stats import semicircular\n",
"\n",
"\n",
"%matplotlib inline"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Sampling from a disc\n",
"\n",
"Our first example is sampling uniformly from a disc\n",
"\n",
"$$p(x, y) = \\frac{1}{\\pi}; x^2 + y+2 \\leq 1.$$\n",
"\n",
"A naive implementation of this would define a uniform prior over a square over `[-1, 1]` and then reject points that don't satisfy the radius constraint.\n",
"Naive sampling from this parameterization would have an efficiency of $\\pi / 4$.\n",
"\n",
"If we instead consider the marginal distribution $p(x)$ and conditional distribution $p(y | x)$, we can achieve a sampling efficiency of 100%.\n",
"\n",
"$$\n",
"p(x) = \\int_{-1 + \\sqrt{x^2 + y^2}}^{1 - \\sqrt{x^2 + y^2}} dy p(x, y) = \\frac{2 (1 - \\sqrt{x^2 + y^2})}{\\pi} \\\\\n",
"p(y | x) = \\frac{1}{2 (1 - \\sqrt{x^2})}\n",
"$$\n",
"\n",
"The marginal distribution for $x$ is the [Wigner semicircle distribution](https://en.wikipedia.org/wiki/Wigner_semicircle_distribution), this distribution is not currently defined in `Bilby`, but we can wrap the `scipy` implementation.\n",
"The conditional distribution for $y$ is implemented in `Bilby` as the `ConditionUnifrom`, we just need to define the condition function."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"class SemiCircular(Prior):\n",
"\n",
" def __init__(self, radius=1, center=0, name=None, latex_label=None, unit=None, boundary=None):\n",
" super(SemiCircular, self).__init__(\n",
" minimum=center - radius,\n",
" maximum=center + radius,\n",
" name=name,\n",
" latex_label=latex_label,\n",
" unit=unit,\n",
" boundary=boundary,\n",
" )\n",
" self.radius = radius\n",
" self.center = center\n",
" self._dist = semicircular(loc=center, scale=radius)\n",
"\n",
" def prob(self, val):\n",
" return self._dist.pdf(val)\n",
"\n",
" def ln_prob(self, val):\n",
" return self._dist.logpdf(val)\n",
"\n",
" def cdf(self, val):\n",
" return self._dist.cdf(val)\n",
"\n",
" def rescale(self, val):\n",
" return self._dist.ppf(val)\n",
"\n",
"\n",
"def conditional_func_y(reference_parameters, x):\n",
" condition = np.sqrt(reference_parameters[\"maximum\"]-x**2)\n",
" return dict(minimum=-condition, maximum=condition)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"#### Sample from the distribution\n",
"\n",
"To demonstrate the equivalence of the two methods, we will draw samples from the distribution using the two methods and verify that they agree."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"N = int(2e4)\n",
"\n",
"CORNER_KWARGS = dict(\n",
" plot_contours=False,\n",
" plot_density=False,\n",
" fill_contours=False,\n",
" max_n_ticks=3,\n",
" verbose=False,\n",
" use_math_text=True,\n",
")\n",
"\n",
"\n",
"def convert_to_radial(parameters):\n",
" p = parameters.copy()\n",
" p['r'] = p['x']**2 + p['y']**2\n",
" return p\n",
"\n",
"def sample_circle_with_constraint():\n",
" d = PriorDict(\n",
" dictionary=dict(\n",
" x=Uniform(-1, 1),\n",
" y=Uniform(-1, 1),\n",
" r=Constraint(0, 1),\n",
" ),\n",
" conversion_function=convert_to_radial\n",
" )\n",
" return pd.DataFrame(d.sample(N))\n",
"\n",
"\n",
"def sample_circle_with_conditional():\n",
" d = ConditionalPriorDict(\n",
" dictionary=dict(\n",
" x=SemiCircular(),\n",
" y=ConditionalUniform(\n",
" condition_func=conditional_func_y, \n",
" minimum=-1, maximum=1\n",
" )\n",
" )\n",
" )\n",
" return pd.DataFrame(d.sample(N))\n",
"\n",
"\n",
"s1 = sample_circle_with_constraint()\n",
"s2 = sample_circle_with_conditional()\n",
"fig = corner(s1.values, **CORNER_KWARGS, color=\"tab:blue\", labels=[\"$x$\", \"$y$\"])\n",
"corner(s2.values, **CORNER_KWARGS, color=\"tab:green\", fig=fig)\n",
"plt.show()\n",
"plt.close()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Sampling from ordered distributions\n",
"\n",
"As our second example, we demonstrate defining a prior distribution over a set of strictly ordered parameters.\n",
"\n",
"We note that in this case, we do not require that the marginal distributions for each of the parameters are independently and identically disributed, although this can be fairly simply remedied."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"class BoundedUniform(ConditionalUniform):\n",
" \"\"\"Conditional Uniform prior where prior sample < previous prior sample\n",
" \n",
" This is ensured by fixing the maximum bound to be the previous prior sample value.\n",
" \"\"\"\n",
" def __init__(self, idx: int, minimum, maximum, name=None, latex_label=None,\n",
" unit=None, boundary=None):\n",
" super(BoundedUniform, self).__init__(\n",
" minimum=minimum, maximum=maximum, name=name, \n",
" latex_label=latex_label, unit=unit,\n",
" boundary=boundary, condition_func=self.bounds_condition\n",
" )\n",
" self.idx = idx\n",
" self.previous_name = f\"{name[:-1]}{self.idx - 1}\"\n",
" self._required_variables = [self.previous_name] \n",
" # this is used in prior.sample(... **required_variables)\n",
"\n",
"\n",
" def bounds_condition(self, reference_params, **required_variables):\n",
" previous_sample = required_variables[self.previous_name]\n",
" return dict(maximum=previous_sample)\n",
"\n",
"\n",
"def make_uniform_conditonal_priordict(n_priors=3):\n",
" priors = ConditionalPriorDict()\n",
" for i in range(n_priors):\n",
" if i==0:\n",
" priors[f\"uni{i}\"] = Uniform(minimum=0, maximum=1, name=f\"uni{i}\")\n",
" else:\n",
" priors[f\"uni{i}\"] = BoundedUniform(idx=i, minimum=0, maximum=1, name=f\"uni{i}\")\n",
" return priors\n"
]
},
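{
"cell_type": "markdown",
"metadata": {},
"source": [
"As a quick sanity check (a minimal sketch for illustration), we can draw a batch of samples and confirm that every draw respects the ordering `uni0 >= uni1 >= uni2` enforced by `BoundedUniform`."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# draw a small batch of samples and verify the ordering enforced by BoundedUniform\n",
"check = pd.DataFrame(make_uniform_conditonal_priordict(3).sample(1000))\n",
"assert (check[\"uni0\"] >= check[\"uni1\"]).all()\n",
"assert (check[\"uni1\"] >= check[\"uni2\"]).all()\n",
"check.describe()"
]
},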
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"samples = pd.DataFrame(make_uniform_conditonal_priordict(3).sample(10000))\n",
"fig = corner(samples.values, **CORNER_KWARGS, color=\"tab:blue\", labels=[f\"$A_{ii}$\" for ii in range(3)])\n",
"plt.show()\n",
"plt.close()"
]
}
],
"metadata": {},
"nbformat": 4,
"nbformat_minor": 2
}
