-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathclassification_example.py
More file actions
107 lines (83 loc) · 3.18 KB
/
classification_example.py
File metadata and controls
107 lines (83 loc) · 3.18 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
#!/usr/bin/env python3
"""
KortexDL Classification - Iris Dataset (Stable)
================================================
Conservative hyperparameters for stable training.
Usage:
python classification_example.py
"""
import numpy as np

import kortexdl as bd

# scikit-learn is an optional dependency: the demo degrades gracefully
# (prints an install hint and exits) when it is not installed.
try:
    from sklearn.datasets import load_iris
    from sklearn.preprocessing import StandardScaler
    from sklearn.model_selection import train_test_split

    SKLEARN_AVAILABLE = True
except ImportError:
    SKLEARN_AVAILABLE = False
def _accuracy(net, X_test, y_test):
    """Return classification accuracy (percent) of `net` on a test set.

    A prediction is the argmax over the network's raw outputs, compared
    against the argmax of the one-hot target row.
    """
    correct = sum(
        1
        for i in range(len(X_test))
        if np.argmax(net.forward(X_test[i].tolist(), 1, False)) == np.argmax(y_test[i])
    )
    return correct / len(X_test) * 100


def main():
    """Train a small KortexDL network on the Iris dataset and report accuracy.

    Returns:
        int: process exit code — 0 on success, 1 when sklearn is missing.
    """
    print("🎯 KortexDL Classification - Iris Dataset")
    print("=" * 60)
    if not SKLEARN_AVAILABLE:
        print("❌ sklearn required: pip install scikit-learn")
        return 1

    # Load Iris dataset (150 samples, 4 features, 3 classes).
    print("\n📁 Loading Iris dataset...")
    data = load_iris()
    X = data.data.astype(np.float32)
    y_raw = data.target

    # One-hot encode the integer labels (vectorized — replaces the
    # per-sample Python loop) for the MSE loss used below.
    n_classes = 3
    y = np.zeros((len(y_raw), n_classes), dtype=np.float32)
    y[np.arange(len(y_raw)), y_raw] = 1.0
    print(f"✅ Dataset: {len(X)} samples, {X.shape[1]} features, {n_classes} classes")

    # Standardize features — sigmoid units saturate on unscaled inputs.
    scaler = StandardScaler()
    X = scaler.fit_transform(X).astype(np.float32)

    # Stratified split preserves the class balance in both partitions.
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.2, random_state=42, stratify=y_raw
    )
    print(f"✅ Split: {len(X_train)} train, {len(X_test)} test")

    # Small sigmoid network (bounded outputs keep training stable).
    print("\n🧠 Creating network...")
    net = bd.Network([4, 16, 8, 3], bd.ActivationType.Sigmoid)
    print("✅ Network: 4 -> 16 -> 8 -> 3 (Sigmoid)")

    # Conservative learning rate, many epochs.
    print("\n🏋️ Training...")
    epochs = 1000
    learning_rate = 0.5
    X_flat = X_train.flatten().tolist()
    y_flat = y_train.flatten().tolist()
    loss = float("nan")  # defined even if the loop body never runs
    for epoch in range(epochs):
        loss = net.train_batch(X_flat, y_flat, bd.LossType.MSE, learning_rate, len(X_train))
        if epoch % 200 == 0:
            acc = _accuracy(net, X_test, y_test)
            print(f" Epoch {epoch:4d}: Loss = {loss:.4f}, Acc = {acc:.1f}%")

    # Final evaluation on the held-out split (shares _accuracy with the
    # periodic in-training evaluation instead of duplicating the loop).
    print("\n📈 Final Evaluation...")
    accuracy = _accuracy(net, X_test, y_test)
    print(f"✅ Test Accuracy: {accuracy:.1f}%")

    print("\n📊 Sample Predictions:")
    class_names = ['Setosa', 'Versicolor', 'Virginica']
    for i in range(min(10, len(X_test))):
        output = net.forward(X_test[i].tolist(), 1, False)
        pred = np.argmax(output)
        true = np.argmax(y_test[i])
        status = "✓" if pred == true else "✗"
        print(f" {class_names[true]:12s} -> {class_names[pred]:12s} {status}")

    print("\n" + "=" * 60)
    print("✅ Complete!")
    return 0
if __name__ == "__main__":
    # raise SystemExit rather than calling exit(): the exit() builtin is
    # injected by the `site` module and is absent under `python -S` or in
    # frozen/embedded interpreters; SystemExit always works and still
    # propagates main()'s integer return code as the process exit status.
    raise SystemExit(main())