-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathentropy.py
97 lines (77 loc) · 2.66 KB
/
entropy.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
import os
import numpy as np
from matplotlib.pyplot import imread
#def Entropy(text):
# import math
# log2=lambda x:math.log(x)/math.log(2)
# exr={}
# infoc=0
# for each in text:
# try:
# exr[each]+=1
# except:
# exr[each]=1
# textlen=len(text)
# for k,v in exr.items():
# freq = 1.0*v/textlen
# infoc+=freq*log2(freq)
# infoc*=-1
# return infoc
def entropy(image_name):
    """Calculate the Shannon entropy of an image file and save it to disk.

    The Shannon entropy is defined as S = -sum(pk * log(pk)),
    where pk is the fraction of pixels with value k.  The result is
    written to '<image basename>_entropies.txt' in the working directory.

    Parameters
    ----------
    image_name : string
        Path to a grayscale or colour input image.

    Returns
    -------
    entropies : list
        If the input image is a colour image, a list with the
        entropies of the individual channels.

    Examples
    --------
    >>> from image_entropy import entropy
    >>> entropy('image.tiff')
    """
    entropies = ENTROPY(imread(image_name))
    file_name = os.path.basename(image_name)
    name = os.path.splitext(file_name)[0]
    # 'with' guarantees the file is closed even if the write raises.
    with open(name + '_entropies.txt', 'w') as f:
        f.write(file_name + " " + str(entropies))
    # Return the result so callers can use it directly, as the
    # docstring promises (previously the function returned None).
    return entropies
def ENTROPY(image_data):
    """Calculate the Shannon entropy of an image given as an array.

    The Shannon entropy is defined as S = -sum(pk * log(pk)),
    where pk is the fraction of pixels with value k.

    Parameters
    ----------
    image_data : array_like
        Image pixel data.  A 2-D array is treated as a single-channel
        grayscale image; a 3-D array as (rows, cols, channels).  An
        alpha channel (channels == 4) is discarded.

    Returns
    -------
    entropies : list
        One entropy value (in bits) per colour channel.

    Examples
    --------
    >>> from image_entropy import entropy
    >>> entropy('image.tiff')
    """
    image_data = np.asarray(image_data)
    if image_data.ndim == 2:
        # Grayscale input: promote to a single-channel 3-D array so the
        # per-channel loop below works unchanged.
        image_data = image_data[:, :, np.newaxis]
    rows, cols, channels = image_data.shape
    num_pixels = rows * cols
    if channels == 4:
        channels = 3  # discard the alpha channel
    entropies = []
    # Shannon's formula: https://en.wiktionary.org/wiki/Shannon_entropy
    for channel in range(channels):
        counts, _ = np.histogram(image_data[:, :, channel].ravel(), 256)
        probabilities = counts / num_pixels
        # Replace zero probabilities by 1: log2(1) == 0, so those bins
        # contribute nothing, and we avoid log2(0) floating-point warnings.
        probabilities[probabilities == 0] = 1
        entropies.append(-np.sum(probabilities * np.log2(probabilities)))
    return entropies