From 4403b59ed8de638b074c6b35d2a2ff08f1b2300b Mon Sep 17 00:00:00 2001
From: Siddesh Sambasivam
Date: Mon, 28 Jun 2021 00:00:22 +0530
Subject: [PATCH] Add Module base class and SGD optimizer

---
 .gitignore                            |  3 +-
 README.md                             | 41 +--------------------------
 examples/diabetes_linearRegression.py |  1 +
 matterix/nn.py                        | 29 +++++++++++++++++--
 matterix/optim.py                     | 21 ++++++++++++++
 5 files changed, 52 insertions(+), 43 deletions(-)

diff --git a/.gitignore b/.gitignore
index 0cfd736..65fd8a0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -143,4 +143,5 @@ cython_debug/
 
 # Development notebooks
 /*.ipynb
-*.gz
\ No newline at end of file
+*.gz
+test.py
\ No newline at end of file
diff --git a/README.md b/README.md
index 3e7c946..d0cd228 100644
--- a/README.md
+++ b/README.md
@@ -56,46 +56,7 @@ import numpy as np
 from matterix import Tensor
 import matterix.functions as F
 
-# Prepare training data
-x = [[i] for i in range(1, 200)]
-y = [[0] if i % 2 == 0 else [1] for i in range(1, 200)]
-
-x_train, y_train = Tensor(x[:150]), Tensor(y[:150])
-x_test, y_test = Tensor(x[150:]), Tensor(y[150:])
-
-w1 = Tensor(np.random.randn(1, 150), requires_grad=True)
-b1 = Tensor(np.random.randn(1, 150), requires_grad=True)
-w2 = Tensor(np.random.randn(150, 1), requires_grad=True)
-
-
-def model(x):
-
-    out_1 = (x @ w1) + b1
-    out_2 = F.sigmoid(out_1)
-    output = out_2 @ w2
-
-    return output
-
-
-for i in range(100):
-
-    y_pred = model(x_train)
-    loss = y_train - y_pred
-
-    mse_loss = (loss * loss).sum() * (1.0 / (loss.numel()))
-
-    mse_loss.backward()
-
-    w1 -= w1.grad * 0.001
-    b1 -= b1.grad * 0.001
-    w2 -= w2.grad * 0.001
-
-    w1.zero_grad()
-    w2.zero_grad()
-    b1.zero_grad()
-
-    print(f"Epoch: {i} Loss: {mse_loss.data}")
-
+# TO BE ADDED
 ```
 
 Take a look at `examples` for different examples
diff --git a/examples/diabetes_linearRegression.py b/examples/diabetes_linearRegression.py
index 345bd9c..e085841 100644
--- a/examples/diabetes_linearRegression.py
+++ b/examples/diabetes_linearRegression.py
@@ -19,6 +19,7 @@ def model(x):
 
     out_1 = (x @ w1) + b1
+    output = (out_1 @ w2) + b2
 
     return output
 
 
diff --git a/matterix/nn.py b/matterix/nn.py
index fe47ffb..e5b9c58 100644
--- a/matterix/nn.py
+++ b/matterix/nn.py
@@ -1,9 +1,34 @@
-# TODO: Module class to represent neural networks
+import inspect
+from matterix.tensor import Tensor
 
 
 class Module:
     def __init__(self) -> None:
         pass
 
+    def parameters(self):
+        params = dict()
+        # Not sure if this accounts for all the cases
+        for i in inspect.getmembers(self):
+            if not i[0].startswith("_"):
+                if not inspect.ismethod(i[1]):
+                    params[i[0]] = i[1]
+
+        return params
+
+    def __call__(self, x) -> Tensor:
+
+        forward_fn = getattr(self, "forward", None)  # None if forward is undefined
+        if callable(forward_fn):
+            return self.forward(x)
+        else:
+            raise NotImplementedError("Forward function is not implemented")
+
     def zero_grad(self) -> None:
-        pass
+
+        params = self.parameters()
+        for k, v in params.items():
+            v.zero_grad()
+            params[k] = v
+
+        self.__dict__.update(params)
diff --git a/matterix/optim.py b/matterix/optim.py
index 2b14b26..2832940 100644
--- a/matterix/optim.py
+++ b/matterix/optim.py
@@ -1 +1,22 @@
 # TODO: SGD, Adam, RMSProp
+
+# Model (params)
+#   -> Optimizer (which updates the parameters)
+#   -> Needs to be reflected back in the Model (params)
+
+
+class SGD:
+    def __init__(self, model, parameters, lr: float = 0.001) -> None:
+        self.model = model
+        self.params = parameters
+        self.lr = lr
+
+    def step(self):
+
+        for k, v in self.params.items():
+            v -= v.grad * self.lr
+            self.params[k] = v
+        self.model.__dict__.update(self.params)
+
+    def zero_grad(self) -> None:
+        self.model.zero_grad()
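Not part of the patch: a minimal usage sketch of the new `Module`/`SGD` API, for reviewers. `TinyModel`, its shapes, and the toy data are hypothetical; the sketch assumes `Tensor` supports `@`, `+`, `-`, `*`, `.sum()`, `.numel()`, `.backward()`, and in-place `-=`, as the previous README example already used.

```python
import numpy as np

from matterix import Tensor
from matterix.nn import Module
from matterix.optim import SGD


class TinyModel(Module):
    def __init__(self):
        super().__init__()
        # Plain Tensor attributes: Module.parameters() collects every
        # non-underscore, non-method attribute, so both become params.
        self.w = Tensor(np.random.randn(1, 1), requires_grad=True)
        self.b = Tensor(np.random.randn(1), requires_grad=True)

    def forward(self, x):
        return (x @ self.w) + self.b


model = TinyModel()
optimizer = SGD(model, model.parameters(), lr=0.001)

x_train = Tensor([[1.0], [2.0], [3.0]])  # hypothetical toy data
y_train = Tensor([[2.0], [4.0], [6.0]])

for epoch in range(100):
    y_pred = model(x_train)  # Module.__call__ dispatches to forward()
    error = y_train - y_pred
    mse_loss = (error * error).sum() * (1.0 / error.numel())
    mse_loss.backward()
    optimizer.step()       # update params and write them back onto the model
    optimizer.zero_grad()  # delegates to model.zero_grad()
    print(f"Epoch: {epoch} Loss: {mse_loss.data}")
```

One point worth flagging during review: `parameters()` returns every non-underscore, non-method attribute, so a non-`Tensor` attribute set on a subclass would break `zero_grad()`; the sketch keeps all attributes as `Tensor`s for that reason.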