-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy pathWine Project(No Normalization).py
More file actions
64 lines (43 loc) · 1.69 KB
/
Wine Project(No Normalization).py
File metadata and controls
64 lines (43 loc) · 1.69 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
# -*- coding: utf-8 -*-
"""
Spyder Editor
This is a temporary script file.
"""
import numpy as np
import pandas as pd
# a package for machine learning
import sklearn
from sklearn.linear_model import LogisticRegression
from sklearn import model_selection
from sklearn.metrics import accuracy_score
import matplotlib.pyplot as plt
# Load the raw wine dataset from disk.
data = pd.read_csv("C:/Users/Ziqi Xu/Desktop/Machine Learning Project(JNG)/Wine Data.csv")
# Discard any rows containing missing values.
data.dropna(inplace = True)
# Encode the wine type as a binary indicator column: 1 for white, 0 for red.
data["new Type"] = data["type"].eq("white").astype(int)
# Renumber the index from 0 after the row drops above.
data.reset_index(inplace = True, drop = True)
# Study the distribution of column 'quality' (integer scores 3..9).
# NOTE(review): this distribution is highly imbalanced.
# Vectorized per-score count: one boolean comparison + sum per score value,
# instead of the original hand-rolled double loop that rescanned every row
# of the frame once per score (and called data.__len__() directly).
counting_numbers = [int((data["quality"] == i).sum()) for i in range(3, 10)]
# Train a binary classification model.
# Relabel: quality above 5.5 counts as the positive class (1), otherwise 0.
data["new Label"] = (data["quality"] > 5.5).astype(int)
# Feature matrix: the binary type indicator plus the original columns at
# positions 1..10; target is the new binary label.
feature_names = ["new Type"] + list(data.columns)[1 : 11]
X = data[feature_names]
y = data["new Label"]
# 80/20 train-test split via sklearn, seeded for reproducibility.
X_train, X_test, y_train, y_test = model_selection.train_test_split(
    X, y, test_size = 0.2, random_state = 1
)
# Train the model
# Generate an empty logistic-regression model. With unnormalized features
# (this script deliberately skips normalization) the default iteration budget
# (max_iter=100) commonly stops the lbfgs solver before it converges, raising
# a ConvergenceWarning and leaving the coefficients short of the optimum —
# so give the solver a much larger cap.
LR_model = LogisticRegression(max_iter = 10000)
# Train the model to get beta parameters
LR_model.fit(X_train, y_train)
# Make predictions: per-class probabilities and hard 0/1 labels
y_pred_proba = LR_model.predict_proba(X_test)
y_pred_label = LR_model.predict(X_test)
# Model performance evaluation on the held-out test set
print("The model accuracy score is ", accuracy_score(y_test, y_pred_label), "!")