-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathgradient-descent.py
More file actions
executable file
·51 lines (32 loc) · 1.41 KB
/
gradient-descent.py
File metadata and controls
executable file
·51 lines (32 loc) · 1.41 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
#!/usr/bin/python
import numpy as np
def gradient_descent(inputs, outputs, iterations=1000000, learn_rate=0.00093,
                     slope_init=0.32528735632182293,
                     intercept_init=-4.1586206896548035):
    """Fit a line y = slope * x + intercept by fixed-step gradient descent.

    Minimizes the mean squared error between ``outputs`` and the linear
    prediction over ``inputs``.

    Args:
        inputs: 1-D numpy array of x values.
        outputs: 1-D numpy array of observed y values, same length as inputs.
        iterations: number of gradient steps (default preserves the original
            hard-coded 1000000).
        learn_rate: step size along the negative gradient (default preserves
            the original hard-coded 0.00093).
        slope_init: starting slope (default is the original magic constant).
        intercept_init: starting intercept (default is the original constant).

    Returns:
        (slope, intercept, mse): the fitted parameters and the MSE measured
        just before the final parameter update. Also prints a one-line
        summary, preserving the original side effect.
    """
    slope = slope_init
    intercept = intercept_init
    # Loop-invariant: hoisted out of the loop (was recomputed every step).
    n = len(inputs)
    # Defined up front so the summary line is safe even when iterations == 0.
    mse = float("nan")
    last_step = iterations - 1
    for _ in range(iterations):
        outputs_pred = slope * inputs + intercept
        residuals = outputs - outputs_pred
        # Vectorized MSE (replaces the Python-level list-comprehension sum).
        mse = np.mean(residuals ** 2)
        # Partial derivatives of the MSE w.r.t. slope and intercept.
        deriv_slope = -(2.0 / n) * np.sum(inputs * residuals)
        deriv_intercept = -(2.0 / n) * np.sum(residuals)
        # Step down the gradient.
        slope -= learn_rate * deriv_slope
        intercept -= learn_rate * deriv_intercept
    print("slope: {}, intercept: {}, MSE: {}, index: {}".format(
        slope, intercept, mse, last_step))
    return slope, intercept, mse
def init():
    """Run gradient descent on a small hard-coded demo data set."""
    sample_x = np.array([15, 18, 31, 34, 52])
    sample_y = np.array([1, 2, 4, 8, 13])
    gradient_descent(sample_x, sample_y)


if __name__ == "__main__":
    init()