Non_linear.py
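"""Non-linear activation modules (Sigmoide, Tanh, ReLU) for a small
Module-based neural-network framework. The activations have no parameters,
so only forward and backward_delta do real work; the other methods are no-ops."""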
import numpy as np

from Module import *
class Sigmoide(Module):
    def __init__(self):
        pass

    def forward(self, X):
        ## Forward pass: clip the input to avoid overflow in exp, then apply the sigmoid
        X = np.clip(X, -500, 500)
        return 1 / (1 + np.exp(-X))

    def update_parameters(self, gradient_step=1e-3):
        ## Update the parameters from the accumulated gradient and the step gradient_step (no parameters here)
        pass

    def backward_update_gradient(self, input, delta):
        ## Accumulate the gradient (no-op: the sigmoid has no parameters)
        pass

    def backward_delta(self, input, delta):
        ## Derivative of the error w.r.t. the input: sigma(x) * (1 - sigma(x)) * delta
        s = self.forward(input)
        return s * (1 - s) * delta

    def zero_grad(self):
        ## Reset the gradient (no-op: no parameters)
        pass
class Tanh(Module):
    def __init__(self):
        pass

    def forward(self, X):
        ## Forward pass: element-wise hyperbolic tangent
        return np.tanh(X)

    def update_parameters(self, gradient_step=1e-3):
        ## Update the parameters from the accumulated gradient and the step gradient_step (no parameters here)
        pass

    def backward_update_gradient(self, input, delta):
        ## Accumulate the gradient (no-op: tanh has no parameters)
        pass

    def backward_delta(self, input, delta):
        ## Derivative of the error w.r.t. the input: (1 - tanh(x)^2) * delta
        return (1 - self.forward(input) ** 2) * delta

    def zero_grad(self):
        ## Reset the gradient (no-op: no parameters)
        pass
class ReLU(Module):
    def __init__(self):
        pass

    def forward(self, X):
        ## Forward pass: max(0, x) element-wise
        return X * (X > 0)

    def update_parameters(self, gradient_step=1e-3):
        ## Update the parameters from the accumulated gradient and the step gradient_step (no parameters here)
        pass

    def backward_update_gradient(self, input, delta):
        ## Accumulate the gradient (no-op: ReLU has no parameters)
        pass

    def backward_delta(self, input, delta):
        ## Derivative of the error w.r.t. the input: 1 where x > 0, 0 elsewhere, times delta
        return (input > 0) * delta

    def zero_grad(self):
        ## Reset the gradient (no-op: no parameters)
        pass
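

if __name__ == "__main__":
    # Minimal usage sketch, not part of the original file: it assumes only numpy
    # and the three activation modules defined above, and compares backward_delta
    # against a central finite-difference estimate of the derivative.
    rng = np.random.default_rng(0)
    X = rng.normal(size=(4, 3))
    delta = np.ones_like(X)
    eps = 1e-6

    for act in (Sigmoide(), Tanh(), ReLU()):
        analytic = act.backward_delta(X, delta)
        numeric = (act.forward(X + eps) - act.forward(X - eps)) / (2 * eps)
        print(act.__class__.__name__,
              "max |analytic - numeric| =",
              np.abs(analytic - numeric).max())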