// examples.cpp
#include <iostream>
#include <functional>
#include <memory>
#include <set>
#include <vector>
#include <random>
#include <cmath>
#include "Value.h"
#include "MLP.h"
// #include "tests.cpp"
#include "utils.h"
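// Training examples for the Value/MLP autograd classes: each function below
// builds a small MLP over shared_ptr<Value> nodes, runs plain gradient descent
// against the msee loss, and prints the loss once per iteration.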
void test_xor() {
    // XOR truth table: inputs and expected outputs.
    std::vector<std::vector<double>> x = {
        {0, 0},
        {0, 1},
        {1, 0},
        {1, 1}
    };
    std::vector<double> yn = {0, 1, 1, 0};

    // Wrap the targets in Value objects so they can take part in the graph.
    std::vector<std::shared_ptr<Value>> yn_valobjs;
    for (size_t i = 0; i < yn.size(); i++) {
        yn_valobjs.push_back(std::make_shared<Value>(yn[i]));
    }

    // 2 inputs -> hidden layer of 3 -> 1 output.
    MLP mlp(2, {3, 1});

    // Wrap the inputs in Value objects as well.
    std::vector<std::vector<std::shared_ptr<Value>>> xs_valobjs;
    for (const auto& row : x) {
        std::vector<std::shared_ptr<Value>> val_row;
        for (double val : row) {
            val_row.push_back(std::make_shared<Value>(val));
        }
        xs_valobjs.push_back(val_row);
    }

    int epochs = 100;
    for (int i = 0; i < epochs; i++) {
        // Forward pass over every sample.
        std::vector<std::shared_ptr<Value>> ypred;
        for (size_t j = 0; j < xs_valobjs.size(); j++) {
            auto out = mlp(xs_valobjs[j]); // out is a vector of shared_ptr<Value>
            ypred.push_back(out[0]);
        }
        auto loss = msee(ypred, yn_valobjs);

        // Zero gradients, backpropagate, then take a small gradient step.
        auto all_params = mlp.get_all_params();
        for (size_t k = 0; k < all_params.size(); k++) {
            all_params[k]->grad = 0.0;
        }
        backward(loss);
        for (size_t l = 0; l < all_params.size(); l++) {
            all_params[l]->data += -0.01 * all_params[l]->grad;
        }
        std::cout << "Iteration: " << i << " Loss: " << loss->data << "\n";
    }
}
void binary_classification() {
    // 3 inputs -> two hidden layers of 4 -> 1 output.
    std::vector<int> layer_sizes = {4, 4, 1};

    // Four samples with targets in {-1, +1}.
    std::vector<std::vector<float>> xs = {
        {2.0, 3.0, -1.0},
        {3.0, -1.0, 0.5},
        {0.5, 1.0, 1.0},
        {1.0, 1.0, -1.0}
    };
    std::vector<float> ys = {1.0, -1.0, -1.0, 1.0};

    // Wrap the targets in Value objects.
    std::vector<std::shared_ptr<Value>> ys_valobjs;
    for (size_t i = 0; i < ys.size(); i++) {
        ys_valobjs.push_back(std::make_shared<Value>(ys[i]));
    }

    MLP mlp(3, layer_sizes);

    // Wrap the inputs in Value objects.
    std::vector<std::vector<std::shared_ptr<Value>>> xs_valobjs;
    for (const auto& row : xs) {
        std::vector<std::shared_ptr<Value>> val_row;
        for (float val : row) {
            val_row.push_back(std::make_shared<Value>(val));
        }
        xs_valobjs.push_back(val_row);
    }

    int epochs = 100;
    for (int i = 0; i < epochs; i++) {
        // Forward pass over every sample.
        std::vector<std::shared_ptr<Value>> ypred;
        for (size_t j = 0; j < xs_valobjs.size(); j++) {
            auto out = mlp(xs_valobjs[j]);
            ypred.push_back(out[0]);
        }
        auto loss = msee(ypred, ys_valobjs);

        // Zero gradients, backpropagate, then take a small gradient step.
        auto all_params = mlp.get_all_params();
        for (size_t k = 0; k < all_params.size(); k++) {
            all_params[k]->grad = 0.0;
        }
        backward(loss);
        for (size_t l = 0; l < all_params.size(); l++) {
            all_params[l]->data += -0.01 * all_params[l]->grad;
        }
        std::cout << "Iteration: " << i << " Loss: " << loss->data << "\n";
    }
}
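
// A minimal sketch of how these examples might be driven, assuming this file
// is compiled together with the Value/MLP sources and that no other
// translation unit defines main(). This entry point is an illustrative
// addition, not part of the original file.
int main() {
    std::cout << "--- XOR example ---\n";
    test_xor();
    std::cout << "--- Binary classification example ---\n";
    binary_classification();
    return 0;
}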