net.h
#include "functions.h"
struct Weight {
public:
double value = 0; //valore del collegamento ocn il neurone
};
struct Neuron {
    double value = 0;       // current activation, produced by sigmoid(), so it lies between 0 and 1
    vector<Weight> weights; // connections to the neurons of the next layer
};

struct Layer {
    vector<Neuron> neurons;
};
class NeuralNet {
public:
    vector<Layer> layers;
    int generate_net(vector<int> model, float min = -1, float max = 1, float bias = 1);
    int load_inputs(vector<double>* inputs, bool normalize_inputs);
    int load_net(string path);
    int save_net(string path);
    vector<double> calculate_output();

    // Point mutation: re-randomize a single weight somewhere in the net.
    void mutate() {
        // Exclude the output layer: its neurons have no outgoing weights.
        int l = Random::random_value(0, layers.size() - 2);
        int n = Random::random_value(0, layers[l].neurons.size() - 1);
        int w = Random::random_value(0, layers[l].neurons[n].weights.size() - 1);
        layers[l].neurons[n].weights[w].value = Random::random_value(-1, 1);
    }

    // Uniform crossover: each weight stays from *this with probability prob%
    // and is taken from b otherwise. Both nets must share the same topology.
    NeuralNet crossingover(const NeuralNet& b, float prob = 50) {
        NeuralNet net = *this;
        for (size_t l = 0; l < layers.size(); l++) {
            for (size_t n = 0; n < layers[l].neurons.size(); n++) {
                for (size_t w = 0; w < layers[l].neurons[n].weights.size(); w++) {
                    if (rand() % 100 >= prob) {
                        net.layers[l].neurons[n].weights[w] = b.layers[l].neurons[n].weights[w];
                    }
                }
            }
        }
        return net;
    }
};
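// Sketch of how the two genetic operators above might be combined in an
// evolutionary loop (illustrative only; the population handling is
// hypothetical and not part of this header):
//
//     vector<NeuralNet> population; // nets with identical topology
//     ...
//     NeuralNet child = population[0].crossingover(population[1]);
//     if (rand() % 100 < 10) child.mutate(); // mutate ~10% of offspring
//     // evaluate child via load_inputs() + calculate_output(), keep the fittest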
int NeuralNet::generate_net(vector<int> model, float min, float max, float bias) {
    for (size_t l = 0; l < model.size(); l++) {
        Layer layer;
        for (int n = 0; n < model[l]; n++) {
            Neuron neuron;
            // The last layer has no outgoing connections, so it gets no weights.
            for (int w = 0; l != model.size() - 1 && w < model[l + 1]; w++) {
                Weight weight;
                weight.value = Random::random_value(min, max);
                neuron.weights.push_back(weight);
            }
            layer.neurons.push_back(neuron);
        }
        if (l < model.size() - 1) {
            // Append a bias neuron with a fixed value and its own random weights.
            Neuron neuron;
            neuron.value = bias;
            for (int w = 0; w < model[l + 1]; w++) {
                Weight weight;
                weight.value = Random::random_value(min, max);
                neuron.weights.push_back(weight);
            }
            layer.neurons.push_back(neuron);
        }
        layers.push_back(layer);
    }
    return 1;
}
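// Example: generate_net({3, 4, 2}) produces layers of 4, 5 and 2 neurons
// (a fixed-value bias neuron is appended to every layer except the last);
// each neuron in layer l carries model[l + 1] randomly initialized weights:
//
//     NeuralNet net;
//     net.generate_net({3, 4, 2}); // 3 inputs, 4 hidden, 2 outputs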
int NeuralNet::load_inputs(vector<double>* inputs, bool normalize_inputs) {
    // Fill the input layer, skipping its last neuron (the bias).
    for (size_t neuron = 0; neuron < layers[0].neurons.size() - 1 && neuron < (*inputs).size(); neuron++) {
        double input = (*inputs)[neuron];
        // If requested, squash out-of-range inputs into (0, 1) with the sigmoid.
        if (normalize_inputs) {
            if (input < 0 || input > 1) {
                input = sigmoid(input);
            }
        }
        layers[0].neurons[neuron].value = input;
    }
    return 1;
}
int NeuralNet::load_net(string path) {
    ifstream in(path);
    if (!in.is_open()) return 0; // signal failure if the file cannot be read
    for (auto& layer : layers) {
        for (auto& neuron : layer.neurons) {
            for (auto& weight : neuron.weights) {
                in >> weight.value;
            }
        }
    }
    return 1;
}
int NeuralNet::save_net(string path) {
    ofstream out(path);
    if (!out.is_open()) return 0; // signal failure if the file cannot be written
    for (auto& layer : layers) {
        for (auto& neuron : layer.neurons) {
            for (auto& weight : neuron.weights) {
                out << weight.value << " ";
            }
            out << endl;
        }
    }
    return 1;
}
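// Example output of save_net for a generate_net({2, 1}) net (weight values
// illustrative): one line per neuron listing its outgoing weights, so the
// two input neurons plus the bias give three lines, and the output neuron,
// having no outgoing weights, contributes a trailing empty line:
//
//     0.42
//     -0.13
//     0.87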
vector<double> NeuralNet::calculate_output() {
    // Forward pass: each neuron's value is the sigmoid of the weighted sum
    // of every neuron in the previous layer (bias included).
    for (size_t layer = 1; layer < layers.size(); layer++) {
        for (size_t neuron = 0; neuron < layers[layer].neurons.size(); neuron++) {
            // In hidden layers the last neuron is the bias: leave its value fixed.
            bool is_bias = layer < layers.size() - 1 && neuron == layers[layer].neurons.size() - 1;
            if (is_bias) continue;
            double sum = 0;
            for (size_t previous_neuron = 0; previous_neuron < layers[layer - 1].neurons.size(); previous_neuron++) {
                sum += layers[layer - 1].neurons[previous_neuron].value
                     * layers[layer - 1].neurons[previous_neuron].weights[neuron].value;
            }
            layers[layer].neurons[neuron].value = sigmoid(sum);
        }
    }
    // Copy the values of the last layer into the output vector.
    vector<double> output;
    output.resize(layers.back().neurons.size());
    for (size_t neuron = 0; neuron < layers.back().neurons.size(); neuron++) {
        output[neuron] = layers.back().neurons[neuron].value;
    }
    return output;
}
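// Minimal end-to-end sketch (file name and input values are illustrative;
// sigmoid() and Random::random_value() are assumed to come from functions.h):
//
//     NeuralNet net;
//     net.generate_net({2, 3, 1});    // 2 inputs, 3 hidden, 1 output
//     vector<double> inputs = {0.5, 0.8};
//     net.load_inputs(&inputs, true); // squash out-of-range inputs
//     vector<double> out = net.calculate_output();
//     net.save_net("net.txt");        // weights persist as plain text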