-
Notifications
You must be signed in to change notification settings - Fork 5
Expand file tree
/
Copy pathlogistics.py
More file actions
48 lines (41 loc) · 1.68 KB
/
logistics.py
File metadata and controls
48 lines (41 loc) · 1.68 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
#!/usr/bin/env python
# coding: utf-8
import numpy as np
import matplotlib.pyplot as plt
def draw(x1, x2):
    """Plot a transient line through the points (x1[i], x2[i]).

    Draws the line, pauses briefly so the GUI event loop can render the
    frame, then removes the artist so the next call replaces (rather than
    stacks on) the previous line — producing an animation effect.
    """
    artists = plt.plot(x1, x2, '-')
    plt.pause(0.0001)  # let matplotlib flush the frame to screen
    artists[0].remove()  # drop the line so the next frame starts clean
def sigmoid(score):
    """Logistic function: map a score (scalar or numpy array) into (0, 1)."""
    exp_neg_score = np.exp(-score)
    return 1 / (1 + exp_neg_score)
def calculate_error(line_parameters, points, y):
    """Return the mean binary cross-entropy of the current line fit.

    line_parameters: (3, 1) np.matrix of weights [w1, w2, b].T
    points:          (m, 3) design matrix whose last column is the bias 1s
    y:               (m, 1) array of 0/1 labels

    NOTE: `*` here is MATRIX multiplication because line_parameters is an
    np.matrix (np.matrix arithmetic, not elementwise ndarray arithmetic).
    Returns a 1x1 matrix containing the scalar loss.
    """
    n_samples = points.shape[0]
    scores = points * line_parameters      # (m, 1) linear scores
    probs = sigmoid(scores)                # (m, 1) predicted P(label = 1)
    # -(1/m) * [ sum(log p over positives) + sum(log (1-p) over negatives) ]
    cross_entropy = -(1 / n_samples) * (np.log(probs).T * y
                                        + np.log(1 - probs).T * (1 - y))
    return cross_entropy
def gradient_descent(line_parameters, points, y, alpha, n_iterations=1000):
    """Fit logistic-regression weights by batch gradient descent, animating
    the decision boundary after every step.

    line_parameters: (3, 1) np.matrix [w1, w2, b].T — updated IN PLACE.
    points:          (m, 3) design matrix (x1, x2, bias-of-ones columns).
    y:               (m, 1) array of 0/1 labels.
    alpha:           learning rate.
    n_iterations:    number of descent steps (default 1000, matching the
                     previously hard-coded loop count).

    Returns None; the caller's line_parameters matrix holds the result.
    NOTE: `*` is matrix multiplication here (np.matrix operands).
    """
    m = points.shape[0]
    for i in range(n_iterations):
        p = sigmoid(points * line_parameters)          # (m, 1) predictions
        gradient = (points.T * (p - y)) * (alpha / m)  # scaled loss gradient
        line_parameters -= gradient                    # in-place update
        w1 = line_parameters.item(0)
        w2 = line_parameters.item(1)
        b = line_parameters.item(2)
        # Decision boundary: w1*x1 + w2*x2 + b = 0  =>  x2 = -(b + w1*x1)/w2,
        # evaluated at the extremes of the x1 data so the line spans the plot.
        x1 = np.array([points[:, 0].min(), points[:, 0].max()])
        x2 = -b / w2 + x1 * (-w1 / w2)
        draw(x1, x2)
        # Bug fix: previously read the module-level global `all_points`
        # instead of the `points` parameter, and silently discarded the
        # result despite the comment claiming it was printed.
        print(calculate_error(line_parameters, points, y))
n_pts = 100  # points per cluster (200 samples total)
np.random.seed(0) # To generate the same random data points.
bias = np.ones(n_pts)  # bias column of 1s so the intercept b folds into the weights
# Two Gaussian clusters; each row is (x1, x2, 1). Top cluster centred near
# (10, 12), bottom near (5, 6), std-dev 2 on both axes.
top_region = np.array([np.random.normal(10, 2, n_pts), np.random.normal(12, 2 , n_pts), bias]).T
bottom_region = np.array([np.random.normal(5,2,n_pts), np.random.normal(6,2,n_pts), bias]).T
all_points = np.vstack((top_region, bottom_region))  # (2*n_pts, 3) design matrix
# Weights [w1, w2, b].T start at zero; np.matrix so `*` means matrix multiply.
line_parameters = np.matrix([np.zeros(3)]).T
# Labels: top cluster -> 0, bottom cluster -> 1, shaped (2*n_pts, 1).
y = np.array([np.zeros(n_pts), np.ones(n_pts)]).reshape(n_pts*2, 1)
plt.scatter(top_region[:,0], top_region[:,1], color='r')
plt.scatter(bottom_region[:,0], bottom_region[:,1], color='b')
# Animates the decision line as it converges (learning rate 0.06).
gradient_descent(line_parameters, all_points, y, 0.06)
plt.show()