-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathpolynomial_example.py
More file actions
109 lines (85 loc) · 3.07 KB
/
polynomial_example.py
File metadata and controls
109 lines (85 loc) · 3.07 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
#!/usr/bin/env python3
"""
KortexDL Wine Classification Example
=====================================
Multi-class classification using the Wine dataset.
Usage:
python polynomial_example.py
"""
import sys

import numpy as np

import kortexdl as bd

try:
    from sklearn.datasets import load_wine
    from sklearn.model_selection import train_test_split
    from sklearn.preprocessing import StandardScaler

    SKLEARN_AVAILABLE = True
except ImportError:
    SKLEARN_AVAILABLE = False
def main():
    """Run the Wine-dataset multi-class classification demo end to end.

    Loads the sklearn Wine dataset, standardizes it, trains a small
    KortexDL network, and reports test accuracy plus sample predictions.

    Returns:
        0 on success, 1 when scikit-learn is not installed.
    """
    print("🎯 KortexDL Classification - Wine Dataset")
    print("=" * 60)

    # Guard clause: sklearn is an optional dependency probed at import time.
    if not SKLEARN_AVAILABLE:
        print("❌ sklearn is required. Install with: pip install scikit-learn")
        return 1

    # ---- Data loading -------------------------------------------------
    print("\n📁 Loading Wine dataset...")
    wine = load_wine()
    features = wine.data.astype(np.float32)
    labels = wine.target

    # One-hot encode the integer class labels (vectorized fancy indexing).
    n_classes = 3
    targets = np.zeros((len(labels), n_classes), dtype=np.float32)
    targets[np.arange(len(labels)), labels] = 1.0

    print(f"✅ Dataset: {len(features)} samples")
    print(f"✅ Features: {features.shape[1]} (alcohol, malic acid, ash, etc.)")
    print(f"✅ Classes: {n_classes} wine cultivars")

    # Standardize each feature to zero mean / unit variance.
    features = StandardScaler().fit_transform(features).astype(np.float32)

    # Hold out 20% of the samples for evaluation; fixed seed for repeatability.
    X_train, X_test, y_train, y_test = train_test_split(
        features, targets, test_size=0.2, random_state=42
    )
    y_test_raw = np.argmax(y_test, axis=1)
    print(f"✅ Split: {len(X_train)} train, {len(X_test)} test")

    # ---- Model --------------------------------------------------------
    print("\n🧠 Creating classification network...")
    n_features = features.shape[1]
    net = bd.Network([n_features, 32, 16, n_classes], bd.ActivationType.Sigmoid)
    print(f"✅ Network: {n_features} -> 32 -> 16 -> {n_classes}")

    # ---- Training -----------------------------------------------------
    print("\n🏋️ Training...")
    epochs = 300
    # Flatten once up front; train_batch consumes flat Python lists.
    train_inputs = X_train.flatten().tolist()
    train_targets = y_train.flatten().tolist()
    for epoch in range(epochs):
        loss = net.train_batch(train_inputs, train_targets, bd.LossType.MSE, 0.1, len(X_train))
        if epoch % 50 == 0:
            print(f" Epoch {epoch:3d}: Loss = {loss:.4f}")
    print("✅ Training complete!")

    # ---- Evaluation ---------------------------------------------------
    print("\n📈 Evaluating...")
    predictions = []
    for sample in X_test:
        scores = net.forward(sample.tolist(), 1, False)
        predictions.append(np.argmax(scores))
    correct = sum(1 for pred, true_label in zip(predictions, y_test_raw) if pred == true_label)
    accuracy = correct / len(X_test) * 100
    print(f"✅ Test Accuracy: {accuracy:.1f}%")
    print(f"✅ Correct: {correct}/{len(X_test)}")

    # Show up to the first ten per-sample results.
    class_names = ['Class 0', 'Class 1', 'Class 2']
    print("\n📊 Sample Predictions:")
    for pred, true_label in list(zip(predictions, y_test_raw))[:10]:
        status = "✓" if pred == true_label else "✗"
        print(f" True: {class_names[true_label]} Pred: {class_names[pred]} {status}")

    print("\n" + "=" * 60)
    print("✅ Complete!")
    return 0
if __name__ == "__main__":
    # Use sys.exit rather than the bare exit() builtin: exit() is injected by
    # the `site` module and is unavailable under `python -S` or embedded
    # interpreters, while sys.exit reliably propagates main()'s status code.
    sys.exit(main())