-
Notifications
You must be signed in to change notification settings - Fork 15
/
Copy path: activation_simple.py
executable file
·74 lines (50 loc) · 1.32 KB
/
activation_simple.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
#!/usr/bin/env python
import os

import matplotlib.pyplot as plt
import numpy as np
def linear(z):
    """Identity activation: g(z) = z, returned unchanged."""
    return z
def linearGradient(z):
    """Gradient of the identity activation: a float array of ones shaped like z."""
    return np.full(z.shape, 1.0)
def relu(z):
    """Rectified linear unit: z where z > 0, elementwise 0 otherwise."""
    positive = z > 0
    return np.where(positive, z, 0)
def reluGradient(z):
    """Derivative of ReLU: 1 where z > 0, else 0 (subgradient 0 taken at z == 0)."""
    positive = z > 0
    return np.where(positive, 1, 0)
def sigmoid(z):
    """Logistic sigmoid: 1 / (1 + exp(-z)), mapping R into (0, 1)."""
    exp_neg = np.exp(-z)
    return 1. / (1 + exp_neg)
def sigmoidGradient(z):
    """Derivative of the sigmoid: s(z) * (1 - s(z)) where s is the logistic function."""
    # Inlined logistic sigmoid (same expression as the sigmoid() helper).
    s = 1. / (1 + np.exp(-z))
    return s * (1 - s)
def tanh(z):
    """Hyperbolic tangent activation; a thin wrapper over np.tanh."""
    return np.tanh(z)
def tanhGradient(z):
    """Derivative of tanh: 1 - tanh(z)^2."""
    # Inlined np.tanh (the tanh() helper is a direct wrapper around it).
    t = np.tanh(z)
    return 1 - np.power(t, 2.0)
# Shared input range for all activation-function plots.
z = np.linspace(-6, 6, 100)
plt.style.use('ggplot')

# plt.savefig raises FileNotFoundError if the target directory is missing,
# so create it up front instead of relying on it already existing.
os.makedirs('figs', exist_ok=True)


def _plot_activation(fn, grad, title, path):
    """Plot an activation function (red) and its gradient (blue) over z.

    fn, grad : callables mapping an ndarray to an ndarray
    title    : figure title
    path     : output image path passed to plt.savefig
    """
    plt.plot(z, fn(z), 'r-')
    plt.plot(z, grad(z), 'b-')
    plt.legend(['function', 'gradient'])
    plt.xlabel('z')
    plt.title(title)
    plt.savefig(path)
    plt.close()


# One figure per activation; filenames and titles match the originals exactly.
_plot_activation(linear, linearGradient, 'Linear activation function', 'figs/linear.png')
_plot_activation(relu, reluGradient, 'Rectified Linear activation function', 'figs/relu.png')
_plot_activation(sigmoid, sigmoidGradient, 'Sigmoid activation function', 'figs/sigmoid.png')
_plot_activation(tanh, tanhGradient, 'Hyperbolic Tangent activation function', 'figs/tanh.png')