-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathregression3_sgd.py
More file actions
executable file
·80 lines (62 loc) · 1.63 KB
/
regression3_sgd.py
File metadata and controls
executable file
·80 lines (62 loc) · 1.63 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
import numpy as np
import matplotlib.pyplot as plt
# Load the training set from click.csv (header row skipped):
# column 0 = raw input x, column 1 = target y.
train = np.loadtxt('click.csv', delimiter=',', dtype='int', skiprows=1)
train_x, train_y = train[:, 0], train[:, 1]
# Standardization (z-scoring) with the training set's own statistics.
mu = train_x.mean()
sigma = train_x.std()

def standardize(x):
    """Return x rescaled to zero mean / unit std using the training stats."""
    return (x - mu) / sigma

train_z = standardize(train_x)
# Initialize the three model parameters (theta0, theta1, theta2) uniformly
# at random in [0, 1); the model fitted below is theta0 + theta1*x + theta2*x**2.
theta = np.random.rand(3)
# Build the design matrix for the quadratic model: one row [1, x, x**2] per example.
def to_matrix(x):
    """Return the (n, 3) design matrix with columns [1, x, x**2] for vector x."""
    return np.column_stack([np.ones(x.size), x, np.square(x)])
# Design matrix over the standardized inputs; printed as a quick sanity check.
X = to_matrix(train_z)
print(X)
# Prediction function of the current model.
def f(x):
    """Return the model output(s) x . theta (theta is the module-level parameter vector)."""
    return x @ theta
# Mean squared error of the current model.
def MSE(x, y):
    """Return the mean of the squared residuals between y and the predictions f(x)."""
    return np.mean((y - f(x)) ** 2)
# Hyperparameters and training bookkeeping.
ETA = 1e-3               # learning rate
diff = 1                 # last MSE improvement (initialized above the threshold)
count = 0                # epoch counter
error = MSE(X, train_y)  # MSE before any updates

# Repeat full passes over the data until the MSE stops improving noticeably.
while diff > 1e-2:
    # Stochastic gradient descent: one parameter update per example,
    # visiting the examples in a fresh random order each epoch.
    order = np.random.permutation(X.shape[0])
    for xi, yi in zip(X[order], train_y[order]):
        theta = theta - ETA * (f(xi) - yi) * xi

    # Convergence check: improvement of MSE relative to the previous epoch.
    epoch_error = MSE(X, train_y)
    diff = error - epoch_error
    error = epoch_error
# Learning-curve check: continue training with full-batch gradient descent,
# recording the MSE after every update, then plot it against the step index.
errors = [MSE(X, train_y)]
diff = 1
while diff > 1e-2:
    # One gradient step computed over the entire training set at once.
    theta = theta - ETA * np.dot(f(X) - train_y, X)
    errors.append(MSE(X, train_y))
    diff = errors[-2] - errors[-1]

x = np.arange(len(errors))
plt.plot(x, errors)
plt.show()
# Log the final state: update count, parameters, and the last error delta.
# NOTE(review): these lines look like they originally lived INSIDE the SGD
# while-loop above (one log line per epoch) and lost their indentation when
# the file was copied — as written they execute exactly once. Confirm the
# intended placement against the original script.
count += 1
log = '{}回目: theta = {}, 差分 = {:.4f}'
print(log.format(count, theta, diff))
# Visualize the result: scatter the standardized training points and
# overlay the fitted quadratic curve on a dense grid.
grid = np.linspace(-3, 3, 100)
plt.plot(train_z, train_y, 'o')
plt.plot(grid, f(to_matrix(grid)))
plt.show()