-
Notifications
You must be signed in to change notification settings - Fork 0
/
infotheory.py
70 lines (55 loc) · 2.17 KB
/
infotheory.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
import numpy as np
def entropy(hgram, ax=None):
    '''
    Entropy H(X) of one variable given a joint histogram.

    hgram = joint histogram (2d array of bin counts)
    ax = axis over which to sum the histogram to obtain the marginal
         distribution; None (default) computes the entropy of the
         full joint distribution instead.
    returns: entropy value in bits
    '''
    # Convert bin counts to probability values
    px = hgram / float(np.sum(hgram))
    # BUG FIX: the original `if ax:` treated ax=0 as "no axis given"
    # because 0 is falsy, silently returning the joint entropy instead
    # of the axis-0 marginal entropy. Compare against None explicitly.
    if ax is not None:
        px = np.sum(px, axis=ax)  # marginal distribution along ax
    nzs = px > 0  # only non-zero probabilities contribute (0*log 0 := 0)
    return -np.sum(px[nzs] * np.log2(px[nzs]))
def mutual_information(hgram):
    '''
    Mutual information I(X,Y) for a joint histogram.

    hgram = joint histogram (2d array of bin counts)
    returns: mutual information value in bits
    '''
    # Normalize counts into a joint probability table.
    pxy = hgram / float(np.sum(hgram))
    # Marginal distributions along each axis.
    px = np.sum(pxy, axis=1)
    py = np.sum(pxy, axis=0)
    # Outer product of the marginals is the independence baseline.
    independent = np.outer(px, py)
    # Restrict to cells with positive joint probability (0*log 0 := 0).
    support = pxy > 0
    ratio = pxy[support] / independent[support]
    return np.sum(pxy[support] * np.log2(ratio))
def joint_entropy(hgram):
    '''
    Joint entropy H(X,Y) for a joint histogram.

    hgram = joint histogram (2d array of bin counts)
    returns: joint entropy value in bits
    '''
    # Normalize counts into a joint probability table.
    pxy = hgram / float(np.sum(hgram))
    # Keep only cells with positive probability (0*log 0 := 0).
    probs = pxy[pxy > 0]
    return -np.sum(probs * np.log2(probs))
def conditional_entropy(hgram, ax):
    '''
    Conditional entropy H(Y|X) for a joint histogram, via the chain
    rule H(Y|X) = H(X,Y) - H(X).

    hgram = joint histogram (2d array of bin counts)
    ax = axis over which to sum to compute the marginal distribution of X
    returns: conditional entropy value in bits
    '''
    # Normalize counts into a joint probability table.
    pxy = hgram / float(np.sum(hgram))
    # Joint entropy H(X,Y), computed inline over the positive-probability
    # cells so this function stands on its own (0*log 0 := 0).
    joint = pxy[pxy > 0]
    je = -np.sum(joint * np.log2(joint))
    # Marginal entropy H(X) along the requested axis.
    marg = np.sum(pxy, axis=ax)
    marg = marg[marg > 0]
    ex = -np.sum(marg * np.log2(marg))
    # Chain rule: H(Y|X) = H(X,Y) - H(X).
    return je - ex