Merge pull request #1517 from pints-team/xnes-cov-matrix-update-fix
Fixed bug in XNES covariance matrix update.
MichaelClerx authored Jan 23, 2024
2 parents 4f52588 + 040d3d7 commit 13b263a
Showing 3 changed files with 19 additions and 13 deletions.
3 changes: 2 additions & 1 deletion CHANGELOG.md
@@ -5,13 +5,14 @@ All notable changes to this project will be documented in this file.
## Unreleased

### Added
- [#1499](https://github.com/pints-team/pints/pull/1499) Added a log-uniform prior class.
- [#1505](https://github.com/pints-team/pints/pull/1505) Added notes to `ErrorMeasure` and `LogPDF` to say parameters must be real and continuous.
- [#1499](https://github.com/pints-team/pints/pull/1499) Added a log-uniform prior class.
### Changed
- [#1503](https://github.com/pints-team/pints/pull/1503) Stopped showing time units in controller logs, because the units change depending on the output type (see #1467).
### Deprecated
### Removed
### Fixed
- [#1517](https://github.com/pints-team/pints/pull/1517) Fixed a major bug in the covariance matrix update for xNES.
- [#1505](https://github.com/pints-team/pints/pull/1505) Fixed issues with toy problems that accept invalid inputs.


12 changes: 8 additions & 4 deletions pints/_optimisers/_xnes.py
@@ -32,6 +32,8 @@ class XNES(pints.PopulationBasedOptimiser):
.. [2] PyBrain: The Python machine learning library
http://pybrain.org
PyBrain is co-authored by xNES' authors.
"""
def __init__(self, x0, sigma0=None, boundaries=None):
super(XNES, self).__init__(x0, sigma0, boundaries)
@@ -47,7 +49,7 @@ def __init__(self, x0, sigma0=None, boundaries=None):
self._bounded_ids = None # Indices of those xs

# Normalisation / distribution
self._mu = np.array(self._x0) # Mean
self._mu = pints.vector(x0) # Mean
self._A = None # Covariance

# Best solution seen
@@ -106,13 +108,13 @@ def _initialise(self):
d = self._n_parameters
n = self._population_size

# Learning rates
# Learning rates, see Table 1 in [1]
# TODO Allow changing before run() with method call
self._eta_mu = 1
# TODO Allow changing before run() with method call
self._eta_A = 0.6 * (3 + np.log(d)) * d ** -1.5

# Pre-calculated utilities
# Pre-calculated utilities, see Table 1 in [1]
self._us = np.maximum(0, np.log(n / 2 + 1) - np.log(1 + np.arange(n)))
self._us /= np.sum(self._us)
self._us -= 1 / n
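The new comments point to Table 1 of [1] as the source of these constants. For reference, a minimal standalone sketch of the same defaults (the helper name `xnes_defaults` is illustrative only, not a pints function):

```python
import numpy as np

def xnes_defaults(d, n):
    # d: number of parameters, n: population size (defaults from Table 1 in [1]).
    eta_mu = 1.0                                # learning rate for the mean
    eta_a = 0.6 * (3 + np.log(d)) * d ** -1.5   # learning rate for the covariance root
    # Rank-based utilities: the better-ranked samples get positive weight,
    # the rest get zero, then shift so the weights sum to zero.
    us = np.maximum(0, np.log(n / 2 + 1) - np.log(1 + np.arange(n)))
    us /= np.sum(us)
    us -= 1 / n
    return eta_mu, eta_a, us

# Example: 2 parameters with a population of 6
print(xnes_defaults(2, 6))
```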
@@ -162,10 +164,12 @@ def tell(self, fx):
self._mu += self._eta_mu * np.dot(self._A, Gd)

# Update root of covariance matrix
# Note that this is equation 11 (for the eta-sigma=eta-B case), not the
# more general equations 9&10 version given in Algorithm 1
Gm = np.dot(
np.array([np.outer(z, z).T - self._I for z in self._zs]).T,
self._us)
self._A *= scipy.linalg.expm(np.dot(0.5 * self._eta_A, Gm))
self._A = np.dot(self._A, scipy.linalg.expm(0.5 * self._eta_A * Gm))

# Update f_guessed on the assumption that the lowest value in our
# sample approximates f(mu)
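The fix itself is the last changed pair of lines above: equation 11 in [1] updates the covariance root with a matrix product, A ← A · expm(½ η_A G_M), but the old code applied `*=`, an elementwise (Hadamard) product. Note that the argument to `expm` was already right (`np.dot(0.5 * self._eta_A, Gm)` is just a scalar times `Gm`); only the outer multiplication changed. A toy sketch (not pints code) showing that the two operations genuinely differ for non-trivial matrices:

```python
import numpy as np
import scipy.linalg

# Toy illustration of the fix: elementwise multiplication vs matrix product.
rng = np.random.default_rng(1)
A = np.eye(2) + 0.1 * rng.standard_normal((2, 2))  # current covariance root
Gm = rng.standard_normal((2, 2))
Gm = 0.1 * (Gm + Gm.T)                             # symmetric gradient term
eta_a = 0.5

step = scipy.linalg.expm(0.5 * eta_a * Gm)
buggy = A * step   # what `self._A *= ...` computed: Hadamard product
fixed = A @ step   # what `np.dot(self._A, ...)` computes: matrix product
print(np.allclose(buggy, fixed))   # False: the two updates diverge
```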
17 changes: 9 additions & 8 deletions pints/tests/test_opt_controller.py
@@ -233,6 +233,7 @@ def test_stopping_max_iterations(self):
def test_logging(self):

# Test with logpdf
np.random.seed(1)
r = pints.toy.TwistedGaussianLogPDF(2, 0.01)
x = np.array([0, 1.01])
b = pints.RectangularBoundaries([-0.01, 0.95], [0.01, 1.05])
@@ -255,17 +256,17 @@ def test_logging(self):
self.assertEqual(lines[5][:-3],
'0 3 -4.140462 -4.140462 0:0')
self.assertEqual(lines[6][:-3],
'1 6 -4.140462 -4.140465 0:0')
'1 6 -4.140462 -4.140482 0:0')
self.assertEqual(lines[7][:-3],
'2 11 -4.140462 -4.140462 0:0')
'2 9 -4.140462 -4.140465 0:0')
self.assertEqual(lines[8][:-3],
'3 16 -4.140462 -4.140466 0:0')
'3 14 -4.140462 -4.140462 0:0')
self.assertEqual(lines[9][:-3],
'6 33 -4.140462 -4.140462 0:0')
'6 30 -4.140462 -4.140462 0:0')
self.assertEqual(lines[10][:-3],
'9 51 -4.140462 -4.140462 0:0')
'9 47 -4.140462 -4.140463 0:0')
self.assertEqual(lines[11][:-3],
'10 51 -4.140462 -4.140462 0:0')
'10 47 -4.140462 -4.140463 0:0')
self.assertEqual(
lines[12], 'Halting: Maximum number of iterations (10) reached.')

@@ -448,8 +449,8 @@ def test_post_run_statistics(self):
opt.run()
t_upper = t.time()

self.assertEqual(opt.iterations(), 84)
self.assertEqual(opt.evaluations(), 495)
self.assertEqual(opt.iterations(), 125)
self.assertEqual(opt.evaluations(), 734)

# Time after run is greater than zero
self.assertIsInstance(opt.time(), float)
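Because the corrected update changes the optimiser's trajectory, the expected iteration/evaluation counts and logged scores above had to be regenerated, with `np.random.seed(1)` now pinning the run. A minimal sketch of that kind of seeded run, assuming the usual controller API (`method=pints.XNES` is an assumption here, based on which expectations moved; exact numbers depend on the pints version):

```python
import numpy as np
import pints
import pints.toy

# Seeded run similar to the logging test above; printed values are
# illustrative only and will vary with the pints version.
np.random.seed(1)
r = pints.toy.TwistedGaussianLogPDF(2, 0.01)
x = np.array([0, 1.01])
b = pints.RectangularBoundaries([-0.01, 0.95], [0.01, 1.05])

opt = pints.OptimisationController(r, x, boundaries=b, method=pints.XNES)
opt.set_max_iterations(10)
opt.set_log_to_screen(True)
xbest, fbest = opt.run()

print(opt.iterations(), opt.evaluations(), opt.time())
```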