-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathlinear.py
28 lines (20 loc) · 888 Bytes
/
linear.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
import numpy as np
from src.nn.layers.initializers import init_parameters
from src.nn.module import Module
class LinearLayer(Module):
    """Fully connected (affine) layer computing ``z = W x + b``.

    The ``np.dot(weights, x)`` orientation implies a column-major data
    layout — x presumably has shape (in_dim, batch); confirm against
    ``init_parameters`` and the callers.
    """

    def __init__(self, in_dim, out_dim, initializer=None):
        super().__init__()
        # init_parameters is expected to return two parameter objects
        # (weights, bias) exposing .val and .grad — TODO confirm.
        self.weights, self.bias = init_parameters(in_dim, out_dim, initializer)
        # Cached during forward() so backward() can form gradients.
        self.prev_activation = None
        self.z = None

    def forward(self, x):
        """Apply the affine transform and cache the input for backprop."""
        self.prev_activation = x
        self.z = self.weights.val @ self.prev_activation + self.bias.val
        return self.z

    def backward(self, upstream_gradient):
        """Store parameter gradients and return the gradient w.r.t. the input."""
        # dL/dW = dL/dz · x^T ; dL/db sums dL/dz over the batch axis.
        self.weights.grad = upstream_gradient @ self.prev_activation.T
        self.bias.grad = upstream_gradient.sum(axis=1, keepdims=True)
        # dL/dx = W^T · dL/dz — propagated to the preceding layer.
        return self.weights.val.T @ upstream_gradient

    def parameters(self):
        """Return this layer's trainable parameters."""
        return [self.weights, self.bias]