# NESTED SAMPLING MAIN PROGRAM
# (GNU General Public License software, (C) Sivia and Skilling 2006)
# This file was translated to Python by Issac Trotts in 2007.
from math import exp, log, sqrt
import random

DBL_MAX = 1e300  # or so
uniform = random.random  # ~U[0,1)

# logarithmic addition log(exp(x)+exp(y))
def plus(x, y):
    if x > y:
        return x + log(1 + exp(y - x))
    else:
        return y + log(1 + exp(x - y))
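# For example, plus(log(2.0), log(3.0)) equals log(5.0) up to floating-point
# rounding; working in logs this way avoids overflow when exp(x) or exp(y)
# would be huge.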
# n = number of objects to evolve
def nested_sampling(n, max_iter, sample_from_prior, explore):
    """
    This is an implementation of John Skilling's Nested Sampling algorithm
    for computing the normalizing constant of a probability distribution
    (usually the posterior in Bayesian inference).

    sample_from_prior() must return an object with a logL attribute (its
    log-likelihood); the algorithm also sets a logWt attribute on it.
    explore(obj, logLstar) must return a new object sampled from the prior
    subject to the constraint logL > logLstar, without modifying obj in
    place (obj may alias another object in the live collection).

    The return value is a dictionary with the following entries:
        "samples"
        "num_iterations"
        "logZ"
        "logZ_sdev"
        "info_nats"
        "info_bits"
    More information is available here:
        http://www.inference.phy.cam.ac.uk/bayesys/
    A simple usage example appears under __main__ at the bottom of this file.
    """
    ObjList = []      # Collection of n objects
    Samples = []      # Objects stored for posterior results
    logwidth = None   # ln(width in prior mass)
    logLstar = None   # ln(Likelihood constraint)
    H = 0.0           # Information, initially 0
    logZ = -DBL_MAX   # ln(Evidence Z, initially 0)
    logZnew = None    # Updated logZ
    copy = None       # Duplicated object
    worst = None      # Worst object
    nest = None       # Nested sampling iteration count
    # Set prior objects
    for i in range(n):
        ObjList.append(sample_from_prior())
    # Outermost interval of prior mass
    logwidth = log(1.0 - exp(-1.0 / n))
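    # Each iteration discards the worst of the n objects, shrinking the
    # remaining prior mass by a factor of roughly exp(-1/n), so the
    # outermost shell above has expected width 1 - exp(-1/n).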
    # NESTED SAMPLING LOOP ___________________________________________
    for nest in range(max_iter):
        # Worst object in collection, with Weight = width * Likelihood
        worst = 0
        for i in range(1, n):
            if ObjList[i].logL < ObjList[worst].logL:
                worst = i
        ObjList[worst].logWt = logwidth + ObjList[worst].logL
        # Update Evidence Z and Information H
        # (incremental form of H = sum_i (W_i / Z) * logL_i - logZ)
        logZnew = plus(logZ, ObjList[worst].logWt)
        H = exp(ObjList[worst].logWt - logZnew) * ObjList[worst].logL + \
            exp(logZ - logZnew) * (H + logZ) - logZnew
        # ## my incorrect estimation of H
        # H += exp(ObjList[worst].logWt - logZnew) * (ObjList[worst].logL - logZnew)
        logZ = logZnew
        # Posterior Samples (optional)
        Samples.append(ObjList[worst])
        # Kill worst object in favour of copy of different survivor
        if n > 1:  # don't kill if n is only 1
            while True:
                copy = int(n * uniform()) % n  # force 0 <= copy < n
                if copy != worst:
                    break
        else:
            copy = worst  # a single object just evolves in place
        logLstar = ObjList[worst].logL  # new likelihood constraint
        ObjList[worst] = ObjList[copy]  # overwrite worst object
        # Evolve copied object within constraint
        updated = explore(ObjList[worst], logLstar)
        assert updated is not None  # make sure explore didn't update in place
        ObjList[worst] = updated
        # Shrink interval
        logwidth -= 1.0 / n
        # Stop once further shells can no longer change logZ appreciably
        if nest > 2 * n * H + 1:
            print("\nFinished mininest at {} iterations\n".format(nest))
            break
    else:
        # for-else: runs only if the loop exhausted max_iter without breaking
        print("Something went wrong. Try increasing the max number of iterations.")
    # Exit with evidence Z, information H, and optional posterior Samples
    H_bits = H / log(2.0)    # information re-expressed in bits
    sdev_logZ = sqrt(H / n)  # standard deviation of the logZ estimate
    return {"samples": Samples,
            "num_iterations": nest + 1,
            "logZ": logZ,
            "logZ_sdev": sdev_logZ,
            "info_nats": H,
            "info_bits": H_bits}