diff --git a/docs/source/log_priors.rst b/docs/source/log_priors.rst
index 03a5291ce..65c1bb0c9 100644
--- a/docs/source/log_priors.rst
+++ b/docs/source/log_priors.rst
@@ -23,6 +23,7 @@ Overview:
 - :class:`HalfCauchyLogPrior`
 - :class:`InverseGammaLogPrior`
 - :class:`LogNormalLogPrior`
+- :class:`LogUniformLogPrior`
 - :class:`MultivariateGaussianLogPrior`
 - :class:`NormalLogPrior`
 - :class:`StudentTLogPrior`
@@ -48,6 +49,8 @@ Overview:
 
 .. autoclass:: LogNormalLogPrior
 
+.. autoclass:: LogUniformLogPrior
+
 .. autoclass:: MultivariateGaussianLogPrior
 
 .. autoclass:: NormalLogPrior
diff --git a/pints/__init__.py b/pints/__init__.py
index e6448e64a..1c1591d5b 100644
--- a/pints/__init__.py
+++ b/pints/__init__.py
@@ -100,6 +100,7 @@ def version(formatted=False):
     HalfCauchyLogPrior,
     InverseGammaLogPrior,
     LogNormalLogPrior,
+    LogUniformLogPrior,
     MultivariateGaussianLogPrior,
     NormalLogPrior,
     StudentTLogPrior,
diff --git a/pints/_log_priors.py b/pints/_log_priors.py
index eb669ab98..77edbac65 100644
--- a/pints/_log_priors.py
+++ b/pints/_log_priors.py
@@ -748,6 +748,74 @@ def sample(self, n=1):
                                        s=self._scale, size=(n, 1))
 
 
+class LogUniformLogPrior(pints.LogPrior):
+    r"""
+    Defines a log-uniform prior over a given range.
+
+    The range includes the lower and upper boundaries, so that any
+    point ``x`` with a non-zero prior must have ``0 < a <= x <= b``.
+
+    In 1D this has pdf
+
+    .. math::
+        f(x|a,b)=\begin{cases}0,&\text{if }x\not\in
+        [a,b]\\\frac{1}{x \log(\frac{b}{a})}
+        ,&\text{if }x\in[a,b]\end{cases}.
+
+    A random variable :math:`X` distributed according to this pdf has
+    expectation
+
+    .. math::
+        \mathrm{E}(X)=\frac{b-a}{\log(b/a)}.
+
+    For example, to create a prior with :math:`x\in[1e-2,1e2]`, use::
+
+        p = pints.LogUniformLogPrior(1e-2, 1e2)
+
+    Extends :class:`LogPrior`.
+    """
+    def __init__(self, a, b):
+        if a <= 0:
+            raise ValueError("a must be > 0")
+        if b <= a:
+            raise ValueError("b must be > a > 0")
+
+        self._a = a
+        self._b = b
+        # Normalising constant 1 / log(b / a)
+        self._c = np.divide(1, np.log(np.divide(b, a)))
+
+    def __call__(self, x):
+        return scipy.stats.loguniform.logpdf(x, self._a, self._b)
+
+    def cdf(self, x):
+        """ See :meth:`LogPrior.cdf()`. """
+        return scipy.stats.loguniform.cdf(x, self._a, self._b)
+
+    def icdf(self, p):
+        """ See :meth:`LogPrior.icdf()`. """
+        return scipy.stats.loguniform.ppf(p, self._a, self._b)
+
+    def evaluateS1(self, x):
+        """ See :meth:`LogPrior.evaluateS1()`. """
+        dp = np.array(- 1 / x)
+        # Set values outside limits to nan
+        dp[(np.asarray(x) < self._a) | (np.asarray(x) > self._b)] = np.nan
+        return self(x), dp
+
+    def mean(self):
+        """ See :meth:`LogPrior.mean()`. """
+        return scipy.stats.loguniform.mean(self._a, self._b)
+
+    def n_parameters(self):
+        """ See :meth:`LogPrior.n_parameters()`. """
+        return 1
+
+    def sample(self, n=1):
+        """ See :meth:`LogPrior.sample()`. """
+        return scipy.stats.loguniform.rvs(self._a, self._b, size=(n, 1))
+
+
 class MultivariateGaussianLogPrior(pints.LogPrior):
     r"""
     Defines a multivariate Gaussian (log) prior with a given ``mean`` and
diff --git a/pints/tests/test_log_priors.py b/pints/tests/test_log_priors.py
index 499ac341d..23949fd26 100755
--- a/pints/tests/test_log_priors.py
+++ b/pints/tests/test_log_priors.py
@@ -595,6 +595,34 @@ def test_inverse_gamma_prior_sampling(self):
         mean = np.mean(samples1).item()
         self.assertTrue(9. < mean < 11.)
 
+    def test_log_uniform_prior(self):
+
+        # Test input parameters
+        self.assertRaises(ValueError, pints.LogUniformLogPrior, 0, 1)
+        self.assertRaises(ValueError, pints.LogUniformLogPrior, 1, 1)
+
+        a = 1e-2
+        b = 1e2
+
+        p = pints.LogUniformLogPrior(a, b)
+
+        # All values below were calculated independently (not using scipy)
+        self.assertAlmostEqual(p.mean(), 10.856276311376536)
+
+        # Test n_parameters
+        self.assertEqual(p.n_parameters(), 1)
+
+        points = [0.1, 63.0]
+        vals = [0.08225828662619909, -6.36346153275938]
+        dvals = [-10.0, -0.015873015873015872]
+
+        for point, val, dval in zip(points, vals, dvals):
+            test_val_1, test_dval = p.evaluateS1(point)
+            test_val_2 = p(point)
+            self.assertEqual(test_val_1, test_val_2)
+            self.assertAlmostEqual(test_val_1, val)
+            self.assertAlmostEqual(test_dval, dval)
+
     def test_log_normal_prior(self):
 
         # Test input parameters
@@ -657,6 +685,21 @@ def test_log_normal_prior(self):
             self.assertAlmostEqual(pints_val, scipy_val)
             self.assertAlmostEqual(pints_deriv[0], hand_calc_deriv)
 
+    def test_log_uniform_prior_cdf_icdf(self):
+        p1 = pints.LogUniformLogPrior(1e-2, 1e2)
+        self.assertAlmostEqual(p1.cdf(0.1), 0.25)
+        self.assertAlmostEqual(p1.cdf(10), 0.75)
+        self.assertAlmostEqual(p1.icdf(0.25), 0.1)
+        self.assertAlmostEqual(p1.icdf(0.75), 10.0)
+
+    def test_log_uniform_prior_sampling(self):
+        p1 = pints.LogUniformLogPrior(1e-2, 1e2)
+        samples = p1.sample(1000000)
+        mean = p1.mean()
+        sample_mean = np.mean(samples)
+        self.assertEqual(len(samples), 1000000)
+        self.assertLessEqual(np.abs(sample_mean - mean), 0.1)
+
     def test_log_normal_prior_cdf_icdf(self):
         p1 = pints.LogNormalLogPrior(-3.5, 7.7)
         self.assertAlmostEqual(p1.cdf(1.1), 0.6797226585187124)