activations.py (forked from rushter/MLAlgorithms)
"""
References:
https://en.wikipedia.org/wiki/Activation_function
"""
import autograd.numpy as np


def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))


def softmax(z):
    # Avoid numerical overflow by subtracting the row-wise max before exp.
    e = np.exp(z - np.amax(z, axis=1, keepdims=True))
    return e / np.sum(e, axis=1, keepdims=True)


def linear(z):
    return z


def softplus(z):
    """Smooth relu."""
    # Avoid numerical overflow, see:
    # https://docs.scipy.org/doc/numpy/reference/generated/numpy.logaddexp.html
    return np.logaddexp(0.0, z)


def softsign(z):
    return z / (1 + np.abs(z))


def tanh(z):
    return np.tanh(z)


def relu(z):
    return np.maximum(0, z)
def get_activation(name):
    """Return an activation function by name."""
    try:
        return globals()[name]
    except KeyError:
        raise ValueError("Invalid activation function: %s" % name)
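
A minimal usage sketch, not part of the original module: it assumes the HIPS autograd package (which provides autograd.numpy) is installed, and uses autograd.elementwise_grad to differentiate the activations defined above via the get_activation lookup.

if __name__ == "__main__":
    # Illustrative only: look up activations by name and apply them
    # to a small batch of inputs (shape: 1 row x 5 columns).
    from autograd import elementwise_grad

    x = np.array([[-2.0, -0.5, 0.0, 0.5, 2.0]])
    for name in ("sigmoid", "tanh", "relu", "softmax"):
        f = get_activation(name)
        print(name, f(x))

    # autograd.numpy makes these functions differentiable end to end;
    # elementwise_grad returns d(activation)/dz evaluated pointwise.
    d_sigmoid = elementwise_grad(sigmoid)
    print("sigmoid'(0) =", d_sigmoid(np.array([0.0])))  # expected ~0.25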