-
Notifications
You must be signed in to change notification settings - Fork 0
/
basic_bestfitline.py
81 lines (59 loc) · 1.43 KB
/
basic_bestfitline.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
# -*- coding: utf-8 -*-
"""basic_bestfitline.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1ShVT-DDjv632i2w1GLGaquUSmxJTa6EX
"""
import numpy as np
import matplotlib.pyplot as plt
# --- Data setup: 30 x-samples and their observed y-values -------------------
print("X:")
x = np.linspace(1, 100, 30, endpoint=False)
print(x)

print("Y:")
# Observed targets, roughly linear in x with noise.
# (The former `y = np.zeros(30, dtype=float)` was dead code: it was
# immediately overwritten by this literal list, so it has been removed.)
y = [ 43, 62.9, 62.8, 82.7, 78.4, 75., 81.6, 88.2, 94.8, 101.4, 152., 151.9,
171.8, 181.7, 191.6, 151., 157.6, 164.2, 160.8, 177.4, 251., 190.6, 270.8, 203.8,
290.6, 207., 300.4, 230.2, 226.8, 233.4]
print(y)
plt.scatter(x, y)

# Initial hand-picked guess for the line z = m*x + b, drawn in red.
m = 7
b = 100
z = m * x + b
plt.plot(x, z, color='red')
def cost(p, targets=None):
    """Return the sum-of-squared-errors between targets and predictions.

    Parameters:
        p: sequence of predicted y-values.
        targets: sequence of observed y-values; defaults to the
            module-level ``y``, so the existing ``cost(z)`` call sites
            keep working unchanged.

    Returns:
        Scalar sum of (target - prediction)**2 over all pairs.
    """
    t = y if targets is None else targets
    # zip-based iteration replaces the hard-coded range(30), so the
    # cost works for any data length, not only exactly 30 points.
    return sum((ti - pi) * (ti - pi) for ti, pi in zip(t, p))
# Cost of the initial hand-picked red line, before any training.
print(f"Initial Cost: {cost(z)}")
def update(m, b, lr, lr2, xs=None, ys=None):
    """Perform one gradient-descent step for the line y = m*x + b.

    Gradients are those of the sum-of-squared-errors cost.

    Parameters:
        m, b: current slope and intercept.
        lr: learning rate applied to the slope gradient.
        lr2: learning rate applied to the intercept gradient.
        xs, ys: optional data arrays; default to the module-level
            ``x`` and ``y``, so existing ``update(m, b, lr, lr2)``
            call sites keep working unchanged.

    Returns:
        (M, B): the updated slope and intercept.
    """
    if xs is None:
        xs = x
    if ys is None:
        ys = np.asarray(y)
    residual = ys - (m * xs + b)
    # d(cost)/dm = -sum(x_i * r_i).  np.dot of two 1-D vectors already
    # yields a scalar, so the former np.sum(np.dot(...)) wrapper was
    # redundant and has been dropped.
    grad = -np.dot(xs, residual)
    # d(cost)/db = -sum(r_i)
    gradb = -np.sum(residual)
    M = m - grad * lr
    B = b - gradb * lr2
    return M, B
# --- Gradient-descent training ----------------------------------------------
print("Checking Update Function", update(5, 100, 0.000001, 0.0001))
print("Improving Loss:")

# Deliberately poor starting parameters so the descent is visible.
m = -2
b = -200
z = m * x + b

for step in range(300):
    if step % 10 == 0:
        # Log the cost every 10 steps to watch it shrink.
        print(cost(z))
    m, b = update(m, b, 0.000001, 0.001)
    z = m * x + b

print("************************************************")
print(" ")
print("Final Model Predicted Line Stats:")
print("Slope:", m, " ;Y intercept", b)

# Fitted line (green) drawn over the raw scatter.
plt.plot(x, z, color='green')
plt.scatter(x, y)