-
Notifications
You must be signed in to change notification settings - Fork 0
/
sgd.py
61 lines (49 loc) · 1.54 KB
/
sgd.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
import matplotlib.pyplot as plt
import numpy as np
from sklearn import datasets
from sklearn.inspection import DecisionBoundaryDisplay
from sklearn.linear_model import SGDClassifier
def setup_sgd():
    """Fit a multi-class linear SGD classifier on the iris data and plot it.

    Uses only the first two iris features so the training points can be
    drawn in 2-D, standardizes them, fits an ``SGDClassifier``, and
    scatter-plots the training points one color per class.

    Returns:
        The fitted ``SGDClassifier`` instance, so callers can inspect or
        reuse the model (previously nothing was returned; existing callers
        that ignore the return value are unaffected).
    """
    iris = datasets.load_iris()
    # Only the first two features, so the dataset is plottable in 2-D.
    X = iris.data[:, :2]
    y = iris.target
    colors = ["navy", "turquoise", "darkorange"]

    # Shuffle the samples with a fixed seed so the demo is reproducible.
    shuffle_idx = np.arange(X.shape[0])
    np.random.seed(13)
    np.random.shuffle(shuffle_idx)
    X = X[shuffle_idx]
    y = y[shuffle_idx]

    # Standardize to zero mean / unit variance -- SGD is scale-sensitive.
    mean = X.mean(axis=0)
    std = X.std(axis=0)
    X = (X - mean) / std

    # random_state makes the classifier's own internal shuffling
    # deterministic as well; without it, seeding numpy above was not
    # enough to make the fit reproducible.
    clf = SGDClassifier(alpha=0.001, max_iter=100, random_state=13).fit(X, y)

    # Plot the training points, one scatter call per class so each class
    # gets its own color and legend entry.
    plt.figure()
    for cls, color in zip(clf.classes_, colors):
        members = np.where(y == cls)
        plt.scatter(
            X[members, 0],
            X[members, 1],
            c=color,
            label=iris.target_names[cls],
            s=20,
        )
    plt.title("Decision surface of multi-class SGD")
    plt.axis("tight")
    plt.legend()
    return clf