-
Notifications
You must be signed in to change notification settings - Fork 1
/
mlpmath.h
66 lines (51 loc) · 1.68 KB
/
mlpmath.h
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
#ifndef MLPMATH
#define MLPMATH
#include "types.h"
#include <cmath>
inline double norm(const EigenMatrix &input)
{
return (input.transpose()*input).trace()/input.cols();
}
inline double norm2(const EigenMatrix &input)
{
return input.array().square().sum();
}
inline double sigmoid(double const &value) {
return 1/(1+exp(-value));
}
/// Derivative of the logistic sigmoid with respect to its pre-activation
/// input: s(x)·(1 − s(x)).
/// Fix: the original evaluated sigmoid(value) twice; computing it once
/// halves the exp() cost without changing the result.
inline double sigmoidDerivative(double const &value) {
    const double s = sigmoid(value);
    return s * (1 - s);
}
/// Sigmoid derivative expressed in terms of the already-activated output:
/// if a = sigmoid(x), then d/dx sigmoid(x) = a·(1 − a).
/// `value` is therefore expected to be an activation, not a raw input.
inline double sigmoidDerivativeA(double const &value) {
    const double a = value;
    return a * (1 - a);
}
inline double tanH(double const &value) {
return 1.7159*tanh(value*2/3);
}
inline double tanHDerivative(double const &value) {
return 1.7159*2/3*(1-pow(tanh(value*2/3),2));
}
/// Dispatch helper: applies the supplied activation function (taking its
/// argument by const reference) to a single scalar and returns the result.
inline double chooseConstFunction(double (*function)(double const &), double value)
{
    return function(value);
}
/// Dispatch helper: applies the supplied activation function (taking its
/// argument by value) to a single scalar and returns the result.
inline double chooseFunction(double (*function)(double), double value)
{
    return function(value);
}
/// Returns a copy of `matrix` with one extra row of −1 "bias" inputs
/// appended at the bottom, so the bias weight can be stored in the same
/// weight matrix as the ordinary weights.
inline EigenMatrix addBias(const EigenMatrix &matrix)
{
    const auto rows = matrix.rows();
    const auto cols = matrix.cols();
    EigenMatrix withBias(rows + 1, cols);
    withBias.topRows(rows) = matrix;
    withBias.bottomRows(1).setConstant(-1.0);  // bias row: every entry is −1
    return withBias;
}
/// Forward pass for one layer with the default logistic activation:
/// appends the −1 bias row to `inputVector`, multiplies by `weights`, and
/// applies sigmoid element-wise.
/// Fix: std::ptr_fun was deprecated in C++11 and removed in C++17, so the
/// original line no longer compiles under modern standards (and relied on
/// an unqualified name). A lambda is standard-proof and inlinable by Eigen.
inline EigenMatrix activation(const EigenMatrix &weights, const EigenMatrix &inputVector)
{
    return (weights * addBias(inputVector)).unaryExpr([](double v) { return sigmoid(v); });
}
/// Forward pass for one layer with a caller-supplied activation function:
/// appends the −1 bias row to `inputVector`, multiplies by `weights`, and
/// applies `function` element-wise.
/// Fix: std::ptr_fun was deprecated in C++11 and removed in C++17; capture
/// the function pointer in a lambda instead so the code compiles under
/// modern standards.
inline EigenMatrix activation(const EigenMatrix &weights, const EigenMatrix &inputVector, double (*function)(double const &))
{
    return (weights * addBias(inputVector)).unaryExpr([function](double v) { return function(v); });
}
#endif // MLPMATH