-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathgradient.txt
53 lines (38 loc) · 960 Bytes
/
gradient.txt
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
#implement Gradient Descent Algorithm to find the local minima of a function
import numpy as np
import pandas as pd
import sympy as sym
import matplotlib as pyplot
from matplotlib import pyplot
def objective(x):
    """Objective f(x) = (x + 3)^2 — a parabola with its minimum at x = -3.

    Works elementwise on NumPy arrays as well as on scalars.
    """
    shifted = x + 3
    return shifted * shifted
def derivative(x):
    """Analytic first derivative of the objective: d/dx (x + 3)^2 = 2(x + 3)."""
    return 2 * x + 6
def gradient(alpha, start, max_iter, deriv=None):
    """Run fixed-step gradient descent and return the trajectory of iterates.

    Args:
        alpha: learning rate (step size) applied to every update.
        start: initial value of x.
        max_iter: number of descent steps to perform.
        deriv: optional callable returning the gradient at x; defaults to
            the module-level `derivative` (gradient of (x + 3)^2).
            Added as a backward-compatible generalization so other
            objectives can be minimized with the same driver.

    Returns:
        A list of length max_iter + 1: `start` followed by each updated x.
    """
    if deriv is None:
        deriv = derivative  # preserve original behavior for existing callers
    trajectory = [start]
    x = start
    for _ in range(max_iter):
        x = x - alpha * deriv(x)  # x_{k+1} = x_k - alpha * f'(x_k)
        trajectory.append(x)
    return trajectory
# --- Symbolic sanity check of the hand-written derivative using sympy ---
x=sym.symbols('x')
expr=(x+3)**2.0
grad=sym.Derivative(expr,x)
# Print the evaluated symbolic derivative (e.g. 2.0*(x + 3)**1.0)
print("{}".format(grad.doit()))
grad.doit().subs(x,2)  # derivative evaluated at x = 2; result is discarded
# --- Gradient-descent hyperparameters ---
alpha=0.1
start=2
max_iter=30
x=sym.symbols('x')  # NOTE(review): re-binds x and duplicates the symbol defined above
expr=(x+3)**2
# Plot the objective over a wide range and mark the start point (2, f(2)) in red
x_cor=np.linspace(-15,15,100)
pyplot.plot(x_cor,objective(x_cor))
pyplot.plot(2,objective(2),'ro')
# Run the descent; x is re-bound again and now holds the list of iterates
x=gradient(alpha,start,max_iter)
# Re-plot the objective on a narrower range and overlay the descent path
x_cor=np.linspace(-5,5,100)
pyplot.plot(x_cor,objective(x_cor))
x_arr=np.array(x)
pyplot.plot(x_arr,objective(x_arr),'.-',color='red')
pyplot.show()