Commit
add module and SGD
SiddeshSambasivam committed Jun 27, 2021
1 parent 5e521a1 commit 4403b59
Showing 5 changed files with 52 additions and 43 deletions.
3 changes: 2 additions & 1 deletion .gitignore
@@ -143,4 +143,5 @@ cython_debug/
 
 # Development notebooks
 /*.ipynb
-*.gz
+*.gz
+test.py
41 changes: 1 addition & 40 deletions README.md
@@ -56,46 +56,7 @@ import numpy as np
 from matterix import Tensor
 import matterix.functions as F
 
-# Prepare training data
-x = [[i] for i in range(1, 200)]
-y = [[0] if i % 2 == 0 else [1] for i in range(1, 200)]
-
-x_train, y_train = Tensor(x[:150]), Tensor(y[:150])
-x_test, y_test = Tensor(x[150:]), Tensor(y[150:])
-
-w1 = Tensor(np.random.randn(1, 150), requires_grad=True)
-b1 = Tensor(np.random.randn(1, 150), requires_grad=True)
-w2 = Tensor(np.random.randn(150, 1), requires_grad=True)
-
-
-def model(x):
-
-    out_1 = (x @ w1) + b1
-    out_2 = F.sigmoid(out_1)
-    output = out_2 @ w2
-
-    return output
-
-
-for i in range(100):
-
-    y_pred = model(x_train)
-    loss = y_train - y_pred
-
-    mse_loss = (loss * loss).sum() * (1.0 / (loss.numel()))
-
-    mse_loss.backward()
-
-    w1 -= w1.grad * 0.001
-    b1 -= b1.grad * 0.001
-    w2 -= w2.grad * 0.001
-
-    w1.zero_grad()
-    w2.zero_grad()
-    b1.zero_grad()
-
-    print(f"Epoch: {i} Loss: {mse_loss.data}")
-
+# TO BE ADDED
 ```
 
 Take a look at `examples` for different examples
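Since the README now just says "# TO BE ADDED", the walkthrough is presumably pending a rewrite on top of the `Module` and `SGD` classes this commit introduces. A possible sketch of that rewrite, assuming `Module` and `SGD` are importable as `matterix.nn.Module` and `matterix.optim.SGD` and behave as shown in the diffs below (the class name `Net` is invented here):

```python
import numpy as np

from matterix import Tensor
from matterix.nn import Module
from matterix.optim import SGD
import matterix.functions as F

# Same data as the removed snippet
x = [[i] for i in range(1, 200)]
y = [[0] if i % 2 == 0 else [1] for i in range(1, 200)]

x_train, y_train = Tensor(x[:150]), Tensor(y[:150])


class Net(Module):
    def __init__(self) -> None:
        super().__init__()
        # Tensor attributes are collected by Module.parameters()
        self.w1 = Tensor(np.random.randn(1, 150), requires_grad=True)
        self.b1 = Tensor(np.random.randn(1, 150), requires_grad=True)
        self.w2 = Tensor(np.random.randn(150, 1), requires_grad=True)

    def forward(self, x):
        out_1 = (x @ self.w1) + self.b1
        out_2 = F.sigmoid(out_1)
        return out_2 @ self.w2


model = Net()
optimizer = SGD(model, model.parameters(), lr=0.001)

for i in range(100):
    y_pred = model(x_train)
    loss = y_train - y_pred
    mse_loss = (loss * loss).sum() * (1.0 / loss.numel())

    mse_loss.backward()
    optimizer.step()       # replaces the three manual `w -= w.grad * 0.001` updates
    optimizer.zero_grad()  # replaces the three manual `zero_grad()` calls

    print(f"Epoch: {i} Loss: {mse_loss.data}")
```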
1 change: 1 addition & 0 deletions examples/diabetes_linearRegression.py
@@ -19,6 +19,7 @@
 def model(x):
 
     out_1 = (x @ w1) + b1
+
     output = (out_1 @ w2) + b2
 
     return output
29 changes: 27 additions & 2 deletions matterix/nn.py
@@ -1,9 +1,34 @@
 # TODO: Module class to represent neural networks
+import inspect
+from matterix.tensor import Tensor
+
 
 class Module:
     def __init__(self) -> None:
         pass
 
+    def parameters(self):
+        params = dict()
+        # Not sure if this accounts for all the cases
+        for i in inspect.getmembers(self):
+            if not i[0].startswith("_"):
+                if not inspect.ismethod(i[1]):
+                    params[i[0]] = i[1]
+
+        return params
+
+    def __call__(self, x) -> Tensor:
+
+        forward_fn = getattr(self, "forward", None)  # None is the default value
+        if callable(forward_fn):
+            return self.forward(x)
+        else:
+            raise NotImplementedError("Forward function is not implemented")
+
     def zero_grad(self) -> None:
-        pass
-
+        params = self.parameters()
+        for k, v in params.items():
+            v.zero_grad()
+            params[k] = v
+
+        self.__dict__.update(params)
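To make the `Module` contract concrete, here is a minimal usage sketch; the `Affine` subclass, its shapes, and the import of `Tensor` from `matterix` (as in the README) are illustrative assumptions:

```python
import numpy as np

from matterix import Tensor
from matterix.nn import Module


class Affine(Module):
    def __init__(self) -> None:
        super().__init__()
        self.w = Tensor(np.random.randn(3, 1), requires_grad=True)
        self.b = Tensor(np.random.randn(1), requires_grad=True)

    def forward(self, x):
        return (x @ self.w) + self.b


m = Affine()

# parameters() walks inspect.getmembers(self) and keeps every public,
# non-method attribute, keyed by attribute name.
print(sorted(m.parameters().keys()))  # ['b', 'w']

# Calling the instance dispatches to forward(); a subclass with no
# forward method raises NotImplementedError instead.
out = m(Tensor(np.random.randn(4, 3)))

# zero_grad() clears the gradient of every collected parameter.
m.zero_grad()
```

Note that `parameters()` keeps every public non-method attribute, not just `Tensor`s (the in-code comment already flags this), so an `isinstance(value, Tensor)` filter may be needed once modules carry non-parameter state.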
21 changes: 21 additions & 0 deletions matterix/optim.py
@@ -1 +1,22 @@
 # TODO: SGD, Adam, RMSProp
+
+# Model (params)
+# -> Optimizer (which updates the parameters)
+# -> Needs to be reflected in the Model (params)
+
+
+class SGD:
+    def __init__(self, model, parameters, lr: float = 0.001) -> None:
+        self.model = model
+        self.params = parameters
+        self.lr = lr
+
+    def step(self):
+
+        for k, v in self.params.items():
+            v -= v.grad * self.lr
+            self.params[k] = v
+        self.model.__dict__.update(self.params)
+
+    def zero_grad(self) -> None:
+        self.model.zero_grad()
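The comment block at the top of optim.py sketches the flow: the optimizer updates the parameters, and the update has to be reflected back in the model. A small sketch of that round trip, with a hypothetical one-parameter `Line` model:

```python
import numpy as np

from matterix import Tensor
from matterix.nn import Module
from matterix.optim import SGD


class Line(Module):
    def __init__(self) -> None:
        super().__init__()
        self.w = Tensor(np.array([[0.0]]), requires_grad=True)

    def forward(self, x):
        return x @ self.w


model = Line()
optimizer = SGD(model, model.parameters(), lr=0.1)

# One gradient step on a single (x, y) pair
error = Tensor([[2.0]]) - model(Tensor([[1.0]]))
loss = (error * error).sum()
loss.backward()

print("before:", model.w.data)
optimizer.step()       # w -= w.grad * 0.1, then written back into model.__dict__
print("after:", model.w.data)
optimizer.zero_grad()  # forwards to model.zero_grad()
```

Since `v -= v.grad * self.lr` likely rebinds `v` to a new `Tensor` rather than mutating it in place, `step()` also writes the rebound tensors back via `self.model.__dict__.update(self.params)`; that is why `SGD` takes the model itself and not just the parameter dict.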
