-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathsine_wave_example.py
More file actions
100 lines (77 loc) · 3.05 KB
/
sine_wave_example.py
File metadata and controls
100 lines (77 loc) · 3.05 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
#!/usr/bin/env python3
"""
KortexDL Diabetes Regression
============================
Regression on the scikit-learn Diabetes dataset.
Usage:
python sine_wave_example.py
"""
import numpy as np
import kortexdl as bd
try:
from sklearn.datasets import fetch_california_housing
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
SKLEARN_AVAILABLE = True
except ImportError:
SKLEARN_AVAILABLE = False
def main():
    """Train and evaluate a small KortexDL regression network on the
    scikit-learn Diabetes dataset.

    Loads the data, standardizes features and target, trains a
    13->64->32->1 style MLP with MSE loss, then reports normalized MSE,
    R², and a few sample predictions in original target units.

    Returns:
        int: process exit code — 0 on success, 1 when scikit-learn is
        not installed.
    """
    print("🎯 KortexDL Regression - Diabetes Dataset")
    print("=" * 60)

    # Guard clause: everything below needs sklearn for data + scaling.
    if not SKLEARN_AVAILABLE:
        print("❌ sklearn is required. Install with: pip install scikit-learn")
        return 1

    # --- Load the Diabetes dataset ---------------------------------------
    print("\n📁 Loading Diabetes dataset...")
    from sklearn.datasets import load_diabetes
    data = load_diabetes()
    X = data.data.astype(np.float32)
    y = data.target.reshape(-1, 1).astype(np.float32)
    print(f"✅ Dataset: {len(X)} samples")
    print(f"✅ Features: {X.shape[1]} (age, sex, bmi, blood pressure, etc.)")
    print(f"✅ Target: Disease progression measure")

    # --- Normalize features and target -----------------------------------
    # Standardizing both X and y keeps the MSE loss well-conditioned;
    # y is inverse-transformed later for human-readable predictions.
    scaler_X = StandardScaler()
    scaler_y = StandardScaler()
    X = scaler_X.fit_transform(X).astype(np.float32)
    y = scaler_y.fit_transform(y).astype(np.float32)

    # --- Train/test split -------------------------------------------------
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.2, random_state=42
    )
    print(f"✅ Split: {len(X_train)} train, {len(X_test)} test")

    # --- Build the network ------------------------------------------------
    print("\n🧠 Creating regression network...")
    n_features = X.shape[1]
    net = bd.Network([n_features, 64, 32, 1], bd.ActivationType.ReLU)
    print(f"✅ Network: {n_features} -> 64 -> 32 -> 1")

    # --- Training ---------------------------------------------------------
    print("\n🏋️ Training...")
    epochs = 200
    # train_batch appears to take flat row-major lists; flatten once,
    # outside the loop, since the data never changes between epochs.
    X_flat = X_train.flatten().tolist()
    y_flat = y_train.flatten().tolist()
    for epoch in range(epochs):
        loss = net.train_batch(X_flat, y_flat, bd.LossType.MSE, 0.01, len(X_train))
        # Fix: also report the final epoch — the original `epoch % 50`
        # check last printed at epoch 150 and never showed the end loss.
        if epoch % 50 == 0 or epoch == epochs - 1:
            print(f" Epoch {epoch:3d}: Loss = {loss:.4f}")
    print("✅ Training complete!")

    # --- Evaluation -------------------------------------------------------
    print("\n📈 Evaluating...")
    predictions = np.array(
        [net.forward(X_test[i].tolist(), 1, False)[0] for i in range(len(X_test))]
    )

    # Inverse transform so sample predictions are in original target units.
    y_test_orig = scaler_y.inverse_transform(y_test)
    pred_orig = scaler_y.inverse_transform(predictions.reshape(-1, 1))

    # Metrics are computed on the *normalized* scale, matching the loss.
    mse = bd.compute_mse(y_test.flatten().tolist(), predictions.tolist())
    r2 = bd.compute_r2_score(y_test.flatten().tolist(), predictions.tolist())
    print(f"✅ Test MSE (normalized): {mse:.4f}")
    print(f"✅ Test R²: {r2:.4f}")

    # --- Sample predictions ----------------------------------------------
    print("\n🔍 Sample Predictions:")
    for i in range(5):
        true_val = y_test_orig[i][0]
        pred_val = pred_orig[i][0]
        print(f" True: {true_val:6.1f} Pred: {pred_val:6.1f}")

    print("\n" + "=" * 60)
    print("✅ Complete!")
    return 0
if __name__ == "__main__":
    # raise SystemExit instead of the site-provided exit() builtin:
    # exit() is intended for interactive sessions and is not available
    # when Python runs with -S (site disabled).
    raise SystemExit(main())