# train.py
import torch
from sklearn.model_selection import KFold
from pp import PointProcessGPFA
from base import BaseGP, CosineKernel


# Example structure of a training loop within PyTorch
def train_loop(y, model, loss, tol=1e-3):
    # Plain SGD over all model parameters
    optimizer = torch.optim.SGD(model.parameters(), lr=1e-3)
    delta_loss = 1e6
    while delta_loss > tol:
        optimizer.zero_grad()
        # Negate the objective so that maximising the marginal likelihood becomes a minimisation
        l = -1 * loss(y)
        print('l:%f' % l)
        # Backpropagation: automatically calculates gradients of the loss with respect to the parameters
        l.backward()
        # The optimizer takes the gradients and updates the parameters accordingly
        optimizer.step()
        # Calculate the new loss given the parameter update and check for convergence
        l1 = -1 * loss(y).detach()
        delta_loss = torch.abs(l1 - l.detach())
        print('l1:%f' % l1)
    # Extract the fitted parameters: the first parameter is the loading matrix W,
    # after which sigma and mu alternate for each latent GP
    sigma = []
    mu = []
    for n, parameters in enumerate(model.parameters()):
        if n == 0:
            W = parameters.detach().clone()
        elif n % 2 == 1:
            sigma.append(parameters.detach().clone())
        else:
            mu.append(parameters.detach().clone())
    return W, sigma, mu
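

# Minimal usage sketch for train_loop (illustrative only). The shape and content
# of y_example are assumptions; BaseGP, CosineKernel, PointProcessGPFA and
# approximate_marginal_likelihood are used with the same signatures as in
# Cross_Validation below.
def example_train_loop_usage():
    # Hypothetical spike-count data: 50 trials x 4 neurons
    y_example = torch.poisson(torch.rand(50, 4))
    # Two latent processes, each with a cosine-kernel GP prior
    base_models = [BaseGP(CosineKernel) for _ in range(2)]
    model = PointProcessGPFA(4, base_models, W=None)
    # Fit by maximising the approximate marginal likelihood
    W, sigma, mu = train_loop(y_example, model, model.approximate_marginal_likelihood)
    return W, sigma, mu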

# Cross-validation over the latent dimensionality
def Cross_Validation(y):
    sk = KFold(10)
    # Best (lowest) cross-validated negative marginal likelihood seen so far;
    # start at +inf so the first candidate dimension is always accepted
    Marginal_likelihood = torch.tensor(float('inf'))
    latent_dim = 0
    # Check each possible value of the latent dimension
    for i in range(1, y.shape[1] + 1):
        Evaluate_Score_Sum = torch.tensor(0.)
        # K-fold cross-validation; the split is regenerated for every candidate
        # dimension because KFold.split returns a one-shot generator
        for train, test in sk.split(y):
            # Separate the training data and the test data
            traindata = y[train]
            testdata = y[test]
            # Model with i latent GPs
            base_models = [BaseGP(CosineKernel) for _ in range(i)]
            model = PointProcessGPFA(4, base_models, W=None)
            # Training
            W, sigma, mu = train_loop(traindata, model, model.approximate_marginal_likelihood)
            # Evaluate the fitted model on the held-out fold
            base_models = [BaseGP(CosineKernel, sigma=sigma[j], mu=mu[j]) for j in range(i)]
            PG = PointProcessGPFA(4, base_models, W)
            Evaluate_Score = -1 * PG.approximate_marginal_likelihood(testdata)
            Evaluate_Score_Sum = torch.add(Evaluate_Score_Sum, Evaluate_Score)
        # Average over the 10 folds
        Evaluate_Score_Sum = torch.div(Evaluate_Score_Sum, 10)
        print('Evaluate Score:', Evaluate_Score_Sum)
        if Evaluate_Score_Sum < Marginal_likelihood:
            Marginal_likelihood = Evaluate_Score_Sum
            latent_dim = i
    print('Marginal likelihood:', Marginal_likelihood)
    print('The closest latent dimension is:', latent_dim)


'''
def Cross_Validation_iomm(y):
    sk = KFold(10)
    sk = sk.split(y)
    Marginal_likelihood = torch.tensor(0)
    latent_dim = 0
    print('The closest latent dimension is:', latent_dim)
    # Check each possible value of the dimension of latent
    for i in range()
'''
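

# Illustrative entry point: a minimal sketch that builds a small synthetic
# spike-count array (the data shape here is an assumption) and runs the
# latent-dimension search above; Cross_Validation only needs an indexable
# array whose second dimension is the number of observed neurons.
if __name__ == '__main__':
    torch.manual_seed(0)
    # Hypothetical data: 100 trials x 4 neurons of Poisson counts
    y_demo = torch.poisson(torch.rand(100, 4))
    Cross_Validation(y_demo)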