activation.py
import numpy as np


def sigmoid(x):
    """
    Sigmoid function. It can be replaced with scipy.special.expit.
    :param x: input scalar or array
    :return: elementwise sigmoid of x
    """
    return 1 / (1 + np.exp(-x))


def sigmoid_der(x):
    """
    Derivative of the sigmoid function.
    :param x: input scalar or array
    :return: elementwise sigmoid(x) * (1 - sigmoid(x))
    """
    return sigmoid(x) * (1.0 - sigmoid(x))
def tanh(x):
    """
    Hyperbolic tangent.
    :param x: input scalar or array
    :return: elementwise tanh of x
    """
    return np.tanh(x)


def tanh_der(x):
    """
    Derivative of the hyperbolic tangent function.
    :param x: input scalar or array
    :return: elementwise 1 - tanh(x)**2
    """
    return 1.0 - np.power(tanh(x), 2)
def leaky_relu(x):
    """
    Leaky rectified linear unit.
    :param x: input scalar or array
    :return: x where x > 0, 0.01 * x elsewhere
    """
    # np.maximum picks x for positive inputs and 0.01 * x for negative ones.
    return np.maximum(0.01 * x, x)


def leaky_relu_der(x):
    """
    Derivative of leaky ReLU.
    :param x: input array
    :return: 1.0 where x > 0, 0.01 elsewhere
    """
    y = np.ones_like(x)
    y[x <= 0] = 0.01
    return y
# Lookup table mapping an activation name to its function and derivative.
fun_dict = {'sigmoid': {'activation': sigmoid,
                        'derivative': sigmoid_der},
            'tanh': {'activation': tanh,
                     'derivative': tanh_der},
            'linear': {'activation': lambda x: x,
                       'derivative': lambda x: 1.0},
            'leaky_relu': {'activation': leaky_relu,
                           'derivative': leaky_relu_der}}
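

# --- Usage sketch (illustrative, not part of the original module) ---
# A minimal example of how fun_dict might be used: look up an activation and
# its matching derivative by name, apply both elementwise to a NumPy array,
# and verify one derivative against a central finite difference. The input
# values below are arbitrary.
if __name__ == "__main__":
    x = np.array([-2.0, -0.5, 0.0, 0.5, 2.0])

    act = fun_dict['leaky_relu']['activation']
    der = fun_dict['leaky_relu']['derivative']
    print(act(x))  # leaky ReLU: x for positive entries, 0.01 * x for the rest
    print(der(x))  # its derivative: 1.0 for positive entries, 0.01 elsewhere

    # Quick finite-difference check that sigmoid_der matches the numerical gradient.
    eps = 1e-6
    numeric = (sigmoid(x + eps) - sigmoid(x - eps)) / (2 * eps)
    print(np.allclose(numeric, sigmoid_der(x)))  # expected: True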