From 2827882a647af860d66c1d1baad55ada28455501 Mon Sep 17 00:00:00 2001
From: Alec Bills
Date: Wed, 11 Oct 2023 17:00:24 -0700
Subject: [PATCH 1/6] add uniform log prior

---
 pints/_log_priors.py | 31 +++++++++++++++++++++++++++++++
 1 file changed, 31 insertions(+)

diff --git a/pints/_log_priors.py b/pints/_log_priors.py
index eb669ab98..eeae8f981 100644
--- a/pints/_log_priors.py
+++ b/pints/_log_priors.py
@@ -1344,3 +1344,34 @@ def n_parameters(self):
     def sample(self, n=1):
         """ See :meth:`LogPrior.sample()`. """
         return self._boundaries.sample(n)
+
+class LogUniformLogPrior(pints.LogPrior):
+    def __init__(self, a, b):
+        self._a = a
+        self._b = b
+        #constant for S1 evaluation
+        self._c = np.divide(1, np.log(np.divide(b, a)))
+
+    def __call__(self, x):
+        return scipy.stats.loguniform.logpdf(x, self._a, self._b)
+
+    def cdf(self, x):
+        return scipy.stats.loguniform.cdf(x, self._a, self._b)
+
+    def icdf(self, p):
+        return scipy.stats.loguniform.ppf(p, self._a, self._b)
+
+    def evaluateS1(self, x):
+        dp = - self._c * np.power(x, -2)
+        # Set values outside limits to nan
+        dp[(np.asarray(x) < self._a) | (np.asarray(x) > self._b)] = np.nan
+        return self(x), dp
+
+    def mean(self):
+        return scipy.stats.loguniform.mean(self._a, self._b)
+
+    def n_parameters(self):
+        return 1
+
+    def sample(self, n=1):
+        return scipy.stats.loguniform.rvs(self._a, self._b, size=(n, 1))
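Note (not part of the patch series): the class added above simply wraps scipy.stats.loguniform, whose log-density matches the closed form 1 / (x * log(b / a)) quoted in the docs commit later in this series. A quick sanity check, assuming only NumPy and SciPy:

    import numpy as np
    import scipy.stats

    a, b, x = 1e-2, 1e2, 0.1
    # Closed-form log-density of the log-uniform distribution on [a, b]
    by_hand = -np.log(x * np.log(b / a))
    by_scipy = scipy.stats.loguniform.logpdf(x, a, b)
    print(by_hand, by_scipy)   # both ~0.0822582866..., the value asserted in the tests below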
From 20c0c7b2fd44dad8a365c7a69212b3bdba562119 Mon Sep 17 00:00:00 2001
From: Alec Bills
Date: Tue, 17 Oct 2023 17:04:50 -0700
Subject: [PATCH 2/6] test and implement loguniformlogprior

---
 pints/__init__.py              |  1 +
 pints/_log_priors.py           |  9 ++++++-
 pints/tests/test_log_priors.py | 44 ++++++++++++++++++++++++++++++++++
 3 files changed, 53 insertions(+), 1 deletion(-)

diff --git a/pints/__init__.py b/pints/__init__.py
index e6448e64a..e8c7a6895 100644
--- a/pints/__init__.py
+++ b/pints/__init__.py
@@ -105,6 +105,7 @@ def version(formatted=False):
     StudentTLogPrior,
     TruncatedGaussianLogPrior,
     UniformLogPrior,
+    LogUniformLogPrior
 )

 #
diff --git a/pints/_log_priors.py b/pints/_log_priors.py
index eeae8f981..7e3ce30ef 100644
--- a/pints/_log_priors.py
+++ b/pints/_log_priors.py
@@ -1347,6 +1347,11 @@ def sample(self, n=1):

 class LogUniformLogPrior(pints.LogPrior):
     def __init__(self, a, b):
+        if a <= 0:
+            raise ValueError("a must be > 0")
+        if b <= a:
+            raise ValueError("b must be > a > 0")
+
         self._a = a
         self._b = b
         #constant for S1 evaluation
@@ -1362,7 +1367,9 @@ def icdf(self, p):
         return scipy.stats.loguniform.ppf(p, self._a, self._b)

     def evaluateS1(self, x):
-        dp = - self._c * np.power(x, -2)
+        dpdfdx = - self._c * np.power(x, -2)
+        dlogdx = 1/scipy.stats.loguniform.pdf(x, self._a, self._b)
+        dp = np.array(dpdfdx*dlogdx)
         # Set values outside limits to nan
         dp[(np.asarray(x) < self._a) | (np.asarray(x) > self._b)] = np.nan
         return self(x), dp
diff --git a/pints/tests/test_log_priors.py b/pints/tests/test_log_priors.py
index 499ac341d..0d1710c81 100755
--- a/pints/tests/test_log_priors.py
+++ b/pints/tests/test_log_priors.py
@@ -594,6 +594,35 @@ def test_inverse_gamma_prior_sampling(self):
         # roughly the case, to ensure the parameterisation is correct
         mean = np.mean(samples1).item()
         self.assertTrue(9. < mean < 11.)
+
+    def test_log_uniform_prior(self):
+
+        #Test input parameters
+        self.assertRaises(ValueError, pints.LogUniformLogPrior, 0, 1)
+        self.assertRaises(ValueError, pints.LogUniformLogPrior, 1, 1)
+
+        a = 1e-2
+        b = 1e2
+
+        p = pints.LogUniformLogPrior(a, b)
+
+        #all values below were calculated separately (not by scipy)
+        self.assertAlmostEqual(p.mean(), 10.856276311376536)
+
+        #test n_parameters
+        self.assertEqual(p.n_parameters(), 1)
+
+        points = [0.1, 63.0]
+        vals = [0.08225828662619909, -6.36346153275938]
+        dvals = [-10.0, -0.015873015873015872]
+
+        for point, val, dval in zip(points, vals, dvals):
+            test_val_1, test_dval = p.evaluateS1(point)
+            test_val_2 = p(point)
+            self.assertEqual(test_val_1, test_val_2)
+            self.assertAlmostEqual(test_val_1, val)
+            self.assertAlmostEqual(test_dval, dval)
+
     def test_log_normal_prior(self):
@@ -656,6 +685,21 @@ def test_log_normal_prior(self):
         self.assertAlmostEqual(pints_val, scipy_val)
         self.assertAlmostEqual(pints_deriv[0], hand_calc_deriv)
+
+    def test_log_uniform_prior_cdf_icdf(self):
+        p1 = pints.LogUniformLogPrior(1e-2, 1e2)
+        self.assertAlmostEqual(p1.cdf(0.1), 0.25)
+        self.assertAlmostEqual(p1.cdf(10), 0.75)
+        self.assertAlmostEqual(p1.icdf(0.25), 0.1)
+        self.assertAlmostEqual(p1.icdf(0.75), 10.0)
+
+    def test_log_uniform_prior_sampling(self):
+        p1 = pints.LogUniformLogPrior(1e-2, 1e2)
+        samples = p1.sample(1000000)
+        mean = p1.mean()
+        sample_mean = np.mean(samples)
+        self.assertEqual(len(samples), 1000000)
+        self.assertLessEqual(np.abs(sample_mean - mean), 0.1)

     def test_log_normal_prior_cdf_icdf(self):
         p1 = pints.LogNormalLogPrior(-3.5, 7.7)
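Note (not part of the patch series): the chain-rule product introduced in evaluateS1 above, d(pdf)/dx multiplied by 1/pdf, is the derivative of the log-density, and on [a, b] it collapses to -1/x. That is where the hand-calculated dvals in the new test come from (-1/0.1 = -10.0 and -1/63). A short check of that simplification, written against scipy.stats.loguniform:

    import numpy as np
    import scipy.stats

    a, b, x = 1e-2, 1e2, 63.0
    c = 1 / np.log(b / a)                       # the constant stored as self._c
    dpdfdx = -c / x ** 2                        # d(pdf)/dx for pdf(x) = c / x
    dlogdx = 1 / scipy.stats.loguniform.pdf(x, a, b)
    print(dpdfdx * dlogdx, -1 / x)              # both ~ -0.015873015873015872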
""" return scipy.stats.loguniform.cdf(x, self._a, self._b) def icdf(self, p): + """ See :meth:`LogPrior.icdf()`. """ return scipy.stats.loguniform.ppf(p, self._a, self._b) def evaluateS1(self, x): + """ See :meth:`LogPrior.evaluateS1()`. """ dpdfdx = - self._c * np.power(x, -2) dlogdx = 1/scipy.stats.loguniform.pdf(x, self._a, self._b) dp = np.array(dpdfdx*dlogdx) @@ -1375,10 +1403,13 @@ def evaluateS1(self, x): return self(x), dp def mean(self): + """ See :meth:`LogPrior.mean()`. """ return scipy.stats.loguniform.mean(self._a, self._b) def n_parameters(self): + """ See :meth:`LogPrior.n_parameters()`. """ return 1 def sample(self, n=1): + """ See :meth:`LogPrior.sample()`. """ return scipy.stats.loguniform.rvs(self._a, self._b, size=(n, 1)) From 32987d8e5e8bd0fcb39649faa0070e3124f3733d Mon Sep 17 00:00:00 2001 From: Alec Bills Date: Wed, 18 Oct 2023 12:49:51 -0700 Subject: [PATCH 4/6] style fixes --- pints/_log_priors.py | 9 +++++---- pints/tests/test_log_priors.py | 9 ++++----- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/pints/_log_priors.py b/pints/_log_priors.py index a8c5bc00f..0a12e3f1d 100644 --- a/pints/_log_priors.py +++ b/pints/_log_priors.py @@ -1344,7 +1344,8 @@ def n_parameters(self): def sample(self, n=1): """ See :meth:`LogPrior.sample()`. """ return self._boundaries.sample(n) - + + class LogUniformLogPrior(pints.LogPrior): r""" Defines a log-uniform prior over a given range. @@ -1395,9 +1396,9 @@ def icdf(self, p): def evaluateS1(self, x): """ See :meth:`LogPrior.evaluateS1()`. """ - dpdfdx = - self._c * np.power(x, -2) - dlogdx = 1/scipy.stats.loguniform.pdf(x, self._a, self._b) - dp = np.array(dpdfdx*dlogdx) + dpdfdx = - self._c * np.power(x, - 2) + dlogdx = 1 / scipy.stats.loguniform.pdf(x, self._a, self._b) + dp = np.array(dpdfdx * dlogdx) # Set values outside limits to nan dp[(np.asarray(x) < self._a) | (np.asarray(x) > self._b)] = np.nan return self(x), dp diff --git a/pints/tests/test_log_priors.py b/pints/tests/test_log_priors.py index 0d1710c81..23949fd26 100755 --- a/pints/tests/test_log_priors.py +++ b/pints/tests/test_log_priors.py @@ -594,7 +594,7 @@ def test_inverse_gamma_prior_sampling(self): # roughly the case, to ensure the parameterisation is correct mean = np.mean(samples1).item() self.assertTrue(9. < mean < 11.) 
From 32987d8e5e8bd0fcb39649faa0070e3124f3733d Mon Sep 17 00:00:00 2001
From: Alec Bills
Date: Wed, 18 Oct 2023 12:49:51 -0700
Subject: [PATCH 4/6] style fixes

---
 pints/_log_priors.py           | 9 +++++----
 pints/tests/test_log_priors.py | 9 ++++-----
 2 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/pints/_log_priors.py b/pints/_log_priors.py
index a8c5bc00f..0a12e3f1d 100644
--- a/pints/_log_priors.py
+++ b/pints/_log_priors.py
@@ -1344,7 +1344,8 @@ def n_parameters(self):
     def sample(self, n=1):
         """ See :meth:`LogPrior.sample()`. """
         return self._boundaries.sample(n)
-
+
+
 class LogUniformLogPrior(pints.LogPrior):
     r"""
     Defines a log-uniform prior over a given range.
@@ -1395,9 +1396,9 @@ def icdf(self, p):
         return scipy.stats.loguniform.ppf(p, self._a, self._b)

     def evaluateS1(self, x):
         """ See :meth:`LogPrior.evaluateS1()`. """
-        dpdfdx = - self._c * np.power(x, -2)
-        dlogdx = 1/scipy.stats.loguniform.pdf(x, self._a, self._b)
-        dp = np.array(dpdfdx*dlogdx)
+        dpdfdx = - self._c * np.power(x, - 2)
+        dlogdx = 1 / scipy.stats.loguniform.pdf(x, self._a, self._b)
+        dp = np.array(dpdfdx * dlogdx)
         # Set values outside limits to nan
         dp[(np.asarray(x) < self._a) | (np.asarray(x) > self._b)] = np.nan
         return self(x), dp
diff --git a/pints/tests/test_log_priors.py b/pints/tests/test_log_priors.py
index 0d1710c81..23949fd26 100755
--- a/pints/tests/test_log_priors.py
+++ b/pints/tests/test_log_priors.py
@@ -594,7 +594,7 @@ def test_inverse_gamma_prior_sampling(self):
         # roughly the case, to ensure the parameterisation is correct
         mean = np.mean(samples1).item()
         self.assertTrue(9. < mean < 11.)
-
+
     def test_log_uniform_prior(self):

         #Test input parameters
         self.assertRaises(ValueError, pints.LogUniformLogPrior, 0, 1)
         self.assertRaises(ValueError, pints.LogUniformLogPrior, 1, 1)
@@ -605,7 +605,7 @@ def test_log_uniform_prior(self):
         b = 1e2

         p = pints.LogUniformLogPrior(a, b)
-
+
         #all values below were calculated separately (not by scipy)
         self.assertAlmostEqual(p.mean(), 10.856276311376536)
@@ -623,7 +623,6 @@ def test_log_uniform_prior(self):
             self.assertAlmostEqual(test_val_1, val)
             self.assertAlmostEqual(test_dval, dval)
-
     def test_log_normal_prior(self):

         # Test input parameters
@@ -685,14 +684,14 @@ def test_log_normal_prior(self):
         self.assertAlmostEqual(pints_val, scipy_val)
         self.assertAlmostEqual(pints_deriv[0], hand_calc_deriv)
-
+
     def test_log_uniform_prior_cdf_icdf(self):
         p1 = pints.LogUniformLogPrior(1e-2, 1e2)
         self.assertAlmostEqual(p1.cdf(0.1), 0.25)
         self.assertAlmostEqual(p1.cdf(10), 0.75)
         self.assertAlmostEqual(p1.icdf(0.25), 0.1)
         self.assertAlmostEqual(p1.icdf(0.75), 10.0)
-
+
     def test_log_uniform_prior_sampling(self):
         p1 = pints.LogUniformLogPrior(1e-2, 1e2)
         samples = p1.sample(1000000)

From e11f292c1cd6e7f7ee0f4cf79f792612ad4f2cef Mon Sep 17 00:00:00 2001
From: Alec Bills <48105066+abillscmu@users.noreply.github.com>
Date: Fri, 20 Oct 2023 10:18:00 -0700
Subject: [PATCH 5/6] Update pints/_log_priors.py

Co-authored-by: ben18785
---
 pints/_log_priors.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pints/_log_priors.py b/pints/_log_priors.py
index 0a12e3f1d..3065b0f1a 100644
--- a/pints/_log_priors.py
+++ b/pints/_log_priors.py
@@ -1366,7 +1366,7 @@ class LogUniformLogPrior(pints.LogPrior):
     .. math::
         \mathrm{E}(X)=\frac{b-a}{\log(b/a)}.

-    For example, to create a prior with :math:`x\in[1e-2,1e2]`, use either::
+    For example, to create a prior with :math:`x\in[1e-2,1e2]`, use::

         p = pints.LogUniformLogPrior(1e-2, 1e2)
From 06bf19921297b4b65f1e6b7c2798126f21519652 Mon Sep 17 00:00:00 2001
From: Alec Bills
Date: Fri, 20 Oct 2023 10:34:30 -0700
Subject: [PATCH 6/6] code review fixes

---
 docs/source/log_priors.rst |   6 +-
 pints/__init__.py          |   2 +-
 pints/_log_priors.py       | 138 ++++++++++++++++++-------------------
 3 files changed, 72 insertions(+), 74 deletions(-)

diff --git a/docs/source/log_priors.rst b/docs/source/log_priors.rst
index 7423cb03d..65c1bb0c9 100644
--- a/docs/source/log_priors.rst
+++ b/docs/source/log_priors.rst
@@ -23,12 +23,12 @@ Overview:
 - :class:`HalfCauchyLogPrior`
 - :class:`InverseGammaLogPrior`
 - :class:`LogNormalLogPrior`
+- :class:`LogUniformLogPrior`
 - :class:`MultivariateGaussianLogPrior`
 - :class:`NormalLogPrior`
 - :class:`StudentTLogPrior`
 - :class:`TruncatedGaussianLogPrior`
 - :class:`UniformLogPrior`
-- :class:`LogUniformLogPrior`


 .. autoclass:: BetaLogPrior
@@ -49,6 +49,8 @@ Overview:

 .. autoclass:: LogNormalLogPrior

+.. autoclass:: LogUniformLogPrior
+
 .. autoclass:: MultivariateGaussianLogPrior

 .. autoclass:: NormalLogPrior
@@ -58,5 +60,3 @@ Overview:
 .. autoclass:: TruncatedGaussianLogPrior

 .. autoclass:: UniformLogPrior
-
-.. autoclass:: LogUniformLogPrior
diff --git a/pints/__init__.py b/pints/__init__.py
index e8c7a6895..1c1591d5b 100644
--- a/pints/__init__.py
+++ b/pints/__init__.py
@@ -100,12 +100,12 @@ def version(formatted=False):
     HalfCauchyLogPrior,
     InverseGammaLogPrior,
     LogNormalLogPrior,
+    LogUniformLogPrior,
     MultivariateGaussianLogPrior,
     NormalLogPrior,
     StudentTLogPrior,
     TruncatedGaussianLogPrior,
     UniformLogPrior,
-    LogUniformLogPrior
 )

 #
diff --git a/pints/_log_priors.py b/pints/_log_priors.py
index 3065b0f1a..77edbac65 100644
--- a/pints/_log_priors.py
+++ b/pints/_log_priors.py
@@ -748,6 +748,74 @@ def sample(self, n=1):
             s=self._scale, size=(n, 1))

+
+class LogUniformLogPrior(pints.LogPrior):
+    r"""
+    Defines a log-uniform prior over a given range.
+
+    The range includes the lower and upper boundaries, so that any
+    point ``x`` with a non-zero prior must have ``0 < a <= x <= b``.
+
+    In 1D this has pdf
+
+    .. math::
+        f(x|a,b)=\begin{cases}0,&\text{if }x\not\in
+        [a,b]\\\frac{1}{x \log(\frac{b}{a})}
+        ,&\text{if }x\in[a,b]\end{cases}.
+
+    A random variable :math:`X` distributed according to this pdf has
+    expectation
+
+    .. math::
+        \mathrm{E}(X)=\frac{b-a}{\log(b/a)}.
+
+    For example, to create a prior with :math:`x\in[1e-2,1e2]`, use::
+
+        p = pints.LogUniformLogPrior(1e-2, 1e2)
+
+    Extends :class:`LogPrior`.
+    """
+    def __init__(self, a, b):
+        if a <= 0:
+            raise ValueError("a must be > 0")
+        if b <= a:
+            raise ValueError("b must be > a > 0")
+
+        self._a = a
+        self._b = b
+        #constant for S1 evaluation
+        self._c = np.divide(1, np.log(np.divide(b, a)))
+
+    def __call__(self, x):
+        return scipy.stats.loguniform.logpdf(x, self._a, self._b)
+
+    def cdf(self, x):
+        """ See :meth:`LogPrior.cdf()`. """
+        return scipy.stats.loguniform.cdf(x, self._a, self._b)
+
+    def icdf(self, p):
+        """ See :meth:`LogPrior.icdf()`. """
+        return scipy.stats.loguniform.ppf(p, self._a, self._b)
+
+    def evaluateS1(self, x):
+        """ See :meth:`LogPrior.evaluateS1()`. """
+        dp = np.array(- 1 / x)
+        # Set values outside limits to nan
+        dp[(np.asarray(x) < self._a) | (np.asarray(x) > self._b)] = np.nan
+        return self(x), dp
+
+    def mean(self):
+        """ See :meth:`LogPrior.mean()`. """
+        return scipy.stats.loguniform.mean(self._a, self._b)
+
+    def n_parameters(self):
+        """ See :meth:`LogPrior.n_parameters()`. """
+        return 1
+
+    def sample(self, n=1):
+        """ See :meth:`LogPrior.sample()`. """
+        return scipy.stats.loguniform.rvs(self._a, self._b, size=(n, 1))
+
+
 class MultivariateGaussianLogPrior(pints.LogPrior):
     r"""
     Defines a multivariate Gaussian (log) prior with a given ``mean`` and
- """ - def __init__(self, a, b): - if a <= 0: - raise ValueError("a must be > 0") - if b <= a: - raise ValueError("b must be > a > 0") - - self._a = a - self._b = b - #constant for S1 evaluation - self._c = np.divide(1, np.log(np.divide(b, a))) - - def __call__(self, x): - return scipy.stats.loguniform.logpdf(x, self._a, self._b) - - def cdf(self, x): - """ See :meth:`LogPrior.cdf()`. """ - return scipy.stats.loguniform.cdf(x, self._a, self._b) - - def icdf(self, p): - """ See :meth:`LogPrior.icdf()`. """ - return scipy.stats.loguniform.ppf(p, self._a, self._b) - - def evaluateS1(self, x): - """ See :meth:`LogPrior.evaluateS1()`. """ - dpdfdx = - self._c * np.power(x, - 2) - dlogdx = 1 / scipy.stats.loguniform.pdf(x, self._a, self._b) - dp = np.array(dpdfdx * dlogdx) - # Set values outside limits to nan - dp[(np.asarray(x) < self._a) | (np.asarray(x) > self._b)] = np.nan - return self(x), dp - - def mean(self): - """ See :meth:`LogPrior.mean()`. """ - return scipy.stats.loguniform.mean(self._a, self._b) - - def n_parameters(self): - """ See :meth:`LogPrior.n_parameters()`. """ - return 1 - - def sample(self, n=1): - """ See :meth:`LogPrior.sample()`. """ - return scipy.stats.loguniform.rvs(self._a, self._b, size=(n, 1))