SuperNN  1.0.0
network.cpp
1 /*
2  This file is part of SuperNN.
3 
4  SuperNN is free software: you can redistribute it and/or modify
5  it under the terms of the GNU Lesser General Public License as published by
6  the Free Software Foundation, either version 3 of the License, or
7  (at your option) any later version.
8 
9  SuperNN is distributed in the hope that it will be useful,
10  but WITHOUT ANY WARRANTY; without even the implied warranty of
11  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12  GNU Lesser General Public License for more details.
13 
14  You should have received a copy of the GNU Lesser General Public License
15  along with SuperNN. If not, see <http://www.gnu.org/licenses/>.
16 
17  Copyright (C) 2010 - 2015 Lucas Hermann Negri
18 */
19 
20 #include <locale>
21 #include <fstream>
22 #include "activation.hpp"
23 #include "utils.hpp"
24 #include "neuron.hpp"
25 #include "network.hpp"
26 
27 namespace SuperNN
28 {
29 
/* NOTE(review): the definition header for this empty body (original line 30;
   Layer's default constructor per the doxygen index) was lost in extraction —
   restore it from the original source. */
{
}
33 
/* NOTE(review): the definition header for this empty body (original line 34;
   virtual ~Layer() per the doxygen index) was lost in extraction — restore it
   from the original source. */
{
}
37 
/* Appends one neuron to this layer (Layer derives from a std::vector of
   neurons, so push_back is the container's own).
   NOTE(review): the header line (original line 38; void Layer::add_neuron(
   Neuron &n) per the doxygen index) was lost in extraction. */
{
    push_back(n);
}
42 
43 void Layer::add_neurons(unsigned n_neurons, bool bias)
44 {
45  reserve(size() + n_neurons);
46 
47  for(unsigned n = 0; n < n_neurons; ++n)
48  {
49  Neuron l(bias);
50  add_neuron(l);
51  }
52 }
53 
54 void Layer::set_activation(ActFuncType type, double s)
55 {
56  for(unsigned n = 0, e = size(); n < e; ++n)
57  at(n).set_activation(type, s);
58 }
59 
60 void Layer::connect(unsigned to_layer, unsigned to_neuron)
61 {
62  for(unsigned n = 0, e = size(); n < e; ++n)
63  at(n).connect(to_layer, to_neuron);
64 }
65 
/* Appends a layer to the network.
   NOTE(review): the header line (original line 66; void Network::add_layer(
   Layer &l) per the doxygen index) was lost in extraction. */
{
    layers.push_back(l);
}
70 
71 void Network::add_layers(unsigned n_layers)
72 {
73  layers.reserve(layers.size() + n_layers);
74 
75  for(unsigned l = 0; l < n_layers; ++l)
76  {
77  Layer nl;
78  add_layer(nl);
79  }
80 }
81 
/* Default constructor: starts with no layers and a zeroed input-neuron count
   (n_input is recomputed by run()). */
Network::Network() : n_input(0)
{
}
85 
/* NOTE(review): the definition header for this empty body (original line 86;
   virtual ~Network() per the doxygen index) was lost in extraction — restore
   it from the original source. */
{
}
89 
90 Network Network::make_mlp(unsigned input, unsigned hidden, unsigned output)
91 {
92  Network net;
93  net.add_layers(3);
94 
95  Layer &input_layer = net.layers[0];
96  Layer &hidden_layer = net.layers[1];
97  Layer &output_layer = net.layers[2];
98 
99  input_layer.add_neurons(input);
100  input_layer.add_neurons(1, true);
101  hidden_layer.add_neurons(hidden);
102  output_layer.add_neurons(output);
103 
104  net.connect(0, 1);
105  net.connect_neuron_to_layer(0, input, 2);
106  net.connect(1, 2);
107 
108  return net;
109 }
110 
111 Network Network::make_mlp(unsigned input, unsigned hidden1, unsigned hidden2, unsigned output)
112 {
113  Network net;
114  net.add_layers(4);
115 
116  Layer &input_layer = net.layers[0];
117  Layer &hidden1_layer = net.layers[1];
118  Layer &hidden2_layer = net.layers[2];
119  Layer &output_layer = net.layers[3];
120 
121  input_layer.add_neurons(input);
122  input_layer.add_neurons(1, true);
123  hidden1_layer.add_neurons(hidden1);
124  hidden2_layer.add_neurons(hidden2);
125  output_layer.add_neurons(output);
126 
127  net.connect(0, 1);
128 
129  net.connect_neuron_to_layer(0, input, 2);
130  net.connect(1, 2);
131 
132  net.connect_neuron_to_layer(0, input, 3);
133  net.connect(2, 3);
134 
135  return net;
136 }
137 
138 Network Network::make_mlp(unsigned input, unsigned output)
139 {
140  Network net;
141  net.add_layers(2);
142 
143  Layer &input_layer = net.layers[0];
144  Layer &output_layer = net.layers[1];
145 
146  input_layer.add_neurons(input);
147  input_layer.add_neurons(1, true);
148  output_layer.add_neurons(output);
149 
150  net.connect(0, 1);
151 
152  return net;
153 }
154 
155 Network Network::make_fcc(unsigned input, unsigned hidden, unsigned output)
156 {
157  Network net;
158  net.add_layers(hidden + 2);
159 
160  net.layers[0].add_neurons(input);
161  net.layers[0].add_neurons(1, true);
162 
163  for(unsigned l = 1; l <= hidden; ++l)
164  net.layers[l].add_neurons(1);
165 
166  unsigned last = net.layers.size() - 1;
167  net.layers[last].add_neurons(output);
168 
169  for(unsigned l1 = 0; l1 < last; ++l1)
170  for(unsigned l2 = l1+1; l2 <= last; ++l2)
171  net.connect(l1, l2);
172 
173  return net;
174 }
175 
/*
 * Propagates an input row through the network (forward pass) and returns a
 * reference to the output row. When calc_error is true, also stores each
 * output neuron's error (desired - actual), with the desired values read
 * from `in` right after the inputs.
 */
const Row &Network::run(const Row &in, bool calc_error)
{
    const unsigned last_layer = layers.size() - 1;
    const unsigned in_size = in.size();
    clear_neurons(true, true);

    /* copies the inputs to the first neuron layer and count the non-biases */
    n_input = 0;

    for(unsigned i = 0, e = layers[0].size(); i < e; ++i)
    {
        if(!layers[0][i].bias)
        {
            if(n_input >= in_size)
                /* NOTE(review): the true-branch statement (original line 190)
                   was lost in extraction — presumably throwing the
                   size-mismatch exception from utils.hpp; confirm against the
                   original source */
            else
                layers[0][i].out = in[n_input++];
        }
    }

    /* propagate the signal */
    for(unsigned l = 0; l < last_layer; ++l)
    {
        /* accumulate each neuron's weighted output into its targets' net sums */
        for(unsigned n = 0, e = layers[l].size(); n < e; ++n)
        {
            Neuron &neuron = layers[l][n];

            for(unsigned c = 0, e = neuron.size(); c < e; ++c)
            {
                Connection &conn = neuron[c];
                double v = neuron.out * conn.weight;
                layers[conn.to_layer][conn.to_neuron].net += v;
            }
        }

        /* apply the activation function of the layer that just received input */
        for(unsigned n = 0, e = layers[l + 1].size(); n < e; ++n)
        {
            Neuron &neuron = layers[l + 1][n];
            neuron.out = ActFunc::activation(neuron);
        }
    }

    const unsigned n_out = layers[last_layer].size();
    last_output.resize(n_out);

    /* collect the outputs; optionally compute per-neuron errors against the
       desired values stored after the inputs in `in` */
    for(unsigned n = 0; n < n_out; ++n)
    {
        Neuron &neuron = layers[last_layer][n];
        last_output[n] = neuron.out;

        if(calc_error)
            neuron.err = in[n_input + n] - neuron.out;
    }

    return last_output;
}
232 
233 double Network::calc_mse(const Data &data)
234 {
235  const unsigned n_out = layers[layers.size() - 1].size();
236  double mse = 0;
237 
238  for(unsigned p = 0, e = data.size(); p < e; ++p)
239  {
240  const Row& r = run(data[p], true);
241 
242  for(unsigned n = 0; n < n_out; ++n)
243  {
244  double err = data[p][n_input + n] - r[n];
245  mse += err * err;
246  }
247  }
248 
249  mse /= data.size();
250 
251  return mse;
252 }
253 
254 double Network::calc_mae(const Data &data)
255 {
256  const unsigned n_out = layers[layers.size() - 1].size();
257  double mae = 0;
258 
259  for(unsigned p = 0, e = data.size(); p < e; ++p)
260  {
261  const Row& r = run(data[p], true);
262 
263  for(unsigned n = 0; n < n_out; ++n)
264  {
265  double err = data[p][n_input + n] - r[n];
266  mae += std::abs(err);
267  }
268  }
269 
270  mae /= data.size();
271 
272  return mae;
273 }
274 
275 double Network::calc_class(const Data &data, double limit)
276 {
277  unsigned nt = 0;
278 
279  for(unsigned p = 0, e = data.size(); p < e; ++p)
280  {
281  run(data[p]);
282  const unsigned last = layers.size() - 1;
283  unsigned s = 0;
284  const unsigned n_output = layers[last].size();
285 
286  for(unsigned n = 0; n < n_output; ++n)
287  {
288  if(std::abs(layers[last][n].out - data[p][n_input + n]) < limit)
289  ++s;
290  }
291 
292  if(s == n_output)
293  ++nt;
294  }
295 
296  return nt * 100 / (double)data.size();
297 }
298 
299 double Network::calc_class_higher(const Data &data)
300 {
301  unsigned nt = 0;
302 
303  for(unsigned p = 0, e = data.size(); p < e; ++p)
304  {
305  run(data[p]);
306  const unsigned last = layers.size() - 1;
307  const unsigned n_output = layers[last].size();
308  unsigned max1 = -1, max2 = -1;
309  double val1 = -1e30, val2 = -1e30;
310 
311  for(unsigned n = 0; n < n_output; ++n)
312  {
313  if(layers[last][n].out > val1)
314  val1 = layers[last][n].out, max1 = n;
315 
316  if(data[p][n_input + n] > val2)
317  val2 = data[p][n_input + n], max2 = n;
318  }
319 
320  if(max1 == max2)
321  ++nt;
322  }
323 
324  return nt * 100 / (double)data.size();
325 }
326 
327 void Network::init_weights(double min, double max)
328 {
329  for(unsigned l = 0, e = layers.size(); l < e; ++l)
330  {
331  for(unsigned n = 0, e = layers[l].size(); n < e; ++n)
332  {
333  Neuron &neuron = layers[l][n];
334 
335  for(unsigned c = 0, e = neuron.size(); c < e; ++c)
336  neuron[c].weight = Utils::rand_double(max - min) + min;
337  }
338  }
339 }
340 
/* Sets the activation function for all neurons in every layer by delegating
   to Layer::set_activation.
   NOTE(review): the header line (original line 341; void
   Network::set_activation(ActFuncType type, double s) per the doxygen index)
   was lost in extraction. */
{
    for(unsigned l = 0, e = layers.size(); l < e; ++l)
        layers[l].set_activation(type, s);
}
346 
/*
 * Saves the network topology and weights to a text file: layer count, then
 * per layer its neuron count, per neuron its activation function, steepness,
 * bias flag and connection list. The "C" locale is imbued so the numeric
 * format is portable across systems.
 */
void Network::save_file(const std::string &path) const
{
    std::ofstream out;
    out.open(path.c_str());

    if(!out.is_open())
        /* NOTE(review): the true-branch statement (original line 353) was
           lost in extraction — presumably throwing the "file couldn't be
           opened" exception from utils.hpp; confirm against the original
           source */

    out.imbue(std::locale("C"));

    out << layers.size() << std::endl;
    out.precision(file_precision);

    for(unsigned l = 0, e = layers.size(); l < e; ++l)
    {
        out << layers[l].size() << std::endl;

        for(unsigned n = 0, e = layers[l].size(); n < e; ++n)
        {
            const Neuron &neuron = layers[l][n];
            out << neuron.act_func << " " << neuron.steep << " " << neuron.bias << std::endl;
            out << neuron.size() << std::endl;

            /* one line per connection: weight, target layer, target neuron */
            for(unsigned c = 0, e = neuron.size(); c < e; ++c)
            {
                const Connection &conn = neuron[c];
                out << conn.weight << " " << conn.to_layer << " " << conn.to_neuron << std::endl;
            }
        }
    }

    out.close();
}
380 
/*
 * Loads a network previously written by save_file, replacing the current
 * contents. The "C" locale is imbued so numbers parse identically to how
 * they were written, regardless of the system locale.
 */
void Network::load_file(const std::string &path)
{
    /* discard any existing state before loading */
    layers.clear();
    last_output.clear();
    n_input = 0;

    std::ifstream inp;
    inp.open(path.c_str());

    if(!inp.is_open())
        /* NOTE(review): the true-branch statement (original line 391) was
           lost in extraction — presumably a "file couldn't be opened" throw;
           confirm against the original source */

    inp.imbue(std::locale("C"));

    int n_layers;
    inp >> n_layers;

    if(n_layers < 1)
    {
        inp.close();
        /* NOTE(review): original line 401 was lost in extraction —
           presumably an "invalid contents" throw */
    }

    layers.resize(n_layers);

    for(int l = 0; l < n_layers; ++l)
    {
        int n_neurons;
        inp >> n_neurons;

        if(n_neurons < 1)
        {
            inp.close();
            /* NOTE(review): original line 414 was lost in extraction —
               presumably an "invalid contents" throw */
        }

        layers[l].resize(n_neurons);

        for(int n = 0; n < n_neurons; ++n)
        {
            Neuron &neuron = layers[l][n];
            int act, bias; // read as int
            inp >> act >> neuron.steep >> bias;
            neuron.act_func = (ActFuncType)act;
            neuron.bias = (bool)bias; // ignore VC performance warning

            int n_conns;
            inp >> n_conns;

            if(n_conns < 0)
            {
                inp.close();
                /* NOTE(review): original line 433 was lost in extraction —
                   presumably an "invalid contents" throw */
            }

            neuron.conns.resize(n_conns);

            /* one line per connection: weight, target layer, target neuron */
            for(int c = 0; c < n_conns; ++c)
            {
                Connection &conn = neuron[c];
                inp >> conn.weight >> conn.to_layer >> conn.to_neuron;
            }
        }
    }

    inp.close();
}
448 
449 void Network::connect(unsigned from_layer, unsigned to_layer)
450 {
451  for(unsigned n = 0, e = layers[to_layer].size(); n < e; ++n)
452  layers[from_layer].connect(to_layer, n);
453 }
454 
455 void Network::connect_neuron_to_layer(unsigned from_layer, unsigned from_neuron, unsigned to_layer)
456 {
457  Neuron &neuron = layers[from_layer][from_neuron];
458 
459  for(unsigned n = 0, e = layers[to_layer].size(); n < e; ++n)
460  neuron.connect(to_layer, n);
461 }
462 
463 void Network::clear_neurons(bool clear_delta, bool clear_run)
464 {
465  for(unsigned l = 0, e = layers.size(); l < e; ++l)
466  {
467  for(unsigned n = 0, e = layers[l].size(); n < e; ++n)
468  {
469  Neuron &neuron = layers[l][n];
470 
471  if(clear_delta)
472  {
473  neuron.delta = 0;
474  neuron.delta_ok = false;
475  }
476 
477  if(clear_run)
478  {
479  neuron.out = neuron.bias ? 1 : 0;
480  neuron.net = 0;
481  neuron.err = 0;
482  }
483  }
484  }
485 }
486 
/* Counts the synaptic connections (weights) over all neurons of the network.
   NOTE(review): the header line (original line 487; unsigned
   Network::calc_num_weights() const per the doxygen index) was lost in
   extraction. */
{
    unsigned n_weights = 0;

    for(unsigned l = 0, e = layers.size(); l < e; ++l)
    {
        const Layer &layer = layers[l];

        /* a neuron's size() is its number of outgoing connections */
        for(unsigned n = 0, e = layer.size(); n < e; ++n)
            n_weights += layer[n].size();
    }

    return n_weights;
}
501 
/* Counts the neurons over all layers of the network.
   NOTE(review): the header line (original line 502; unsigned
   Network::calc_num_neurons() const per the doxygen index) was lost in
   extraction. */
{
    unsigned n_neurons = 0;

    for(unsigned l = 0, e = layers.size(); l < e; ++l)
        n_neurons += layers[l].size();

    return n_neurons;
}
511 
512 unsigned Network::calc_num_inputs() const
513 {
514  unsigned n_inputs = 0;
515 
516  for(unsigned i = 0, e = layers[0].size(); i < e; ++i)
517  {
518  if(!layers[0][i].bias)
519  ++n_inputs;
520  }
521 
522  return n_inputs;
523 }
524 
/* Returns a const reference to layer l (no bounds checking). */
const Layer &Network::operator[](unsigned l) const
{
    return layers[l];
}
529 
/* Returns a mutable reference to layer l (no bounds checking).
   NOTE(review): the header line (original line 530; the non-const
   Layer &Network::operator[](unsigned l) per the doxygen index) was lost in
   extraction. */
{
    return layers[l];
}
534 
/* Returns the number of layers in the network. */
unsigned Network::size() const
{
    return layers.size();
}
539 
540 }
Neuron, that can contain connections to neurons in the next layers.
Definition: neuron.hpp:70
std::vector< Connection > conns
Synaptic connections.
Definition: neuron.hpp:133
double calc_class(const Data &data, double limit=0.5)
Calculates the classification rate of the network related to a data.
Definition: network.cpp:275
void connect_neuron_to_layer(unsigned from_layer, unsigned from_neuron, unsigned to_layer)
Connects a neuron to all the neurons of another layer.
Definition: network.cpp:455
thrown when a file couldn't be opened
Definition: utils.hpp:45
unsigned calc_num_weights() const
Calculates the current number of weights.
Definition: network.cpp:487
virtual ~Layer()
Definition: network.cpp:34
void add_neurons(unsigned n_neurons, bool bias=false)
Adds a number of neurons to the layer.
Definition: network.cpp:43
Synaptic connection between two neurons.
Definition: neuron.hpp:32
void set_activation(ActFuncType type, double s=1)
Sets the activation function for all the neurons currently in the network.
Definition: network.cpp:341
double calc_mse(const Data &data)
Calculates the mean squared error of the network related to a data.
Definition: network.cpp:233
bool delta_ok
Marks if the delta has been calculated for the current iteration.
Definition: neuron.hpp:154
std::vector< Layer > layers
Neuron layers.
Definition: network.hpp:292
const unsigned file_precision
Precision used when writing floating point numbers to files.
Definition: utils.hpp:39
unsigned calc_num_inputs() const
Calculates the number of neurons on the first layer that aren't biases.
Definition: network.cpp:512
void add_layer(Layer &l)
Adds a layer to the network.
Definition: network.cpp:66
double calc_mae(const Data &data)
Calculates the mean absolute error of the network related to a data.
Definition: network.cpp:254
void add_neuron(Neuron &n)
Adds a neuron to the layer.
Definition: network.cpp:38
static Network make_mlp(unsigned input, unsigned hidden, unsigned output)
Constructs a 'standard' feed forward neural network with one hidden layer.
Definition: network.cpp:90
void add_layers(unsigned n_layers)
Adds a number of layers to the network.
Definition: network.cpp:71
double steep
Activation function steepness.
Definition: neuron.hpp:151
static double activation(const Neuron &neuron)
Calls the actual activation function.
Definition: activation.hpp:187
static Network make_fcc(unsigned input, unsigned hidden, unsigned output)
Constructs a fully connected cascade neural network.
Definition: network.cpp:155
void connect(unsigned to_layer, unsigned to_neuron)
Adds a connection to a neuron.
Definition: neuron.cpp:46
double weight
Weight.
Definition: neuron.hpp:49
double out
Last output of the neuron ( g(net) )
Definition: neuron.hpp:139
SUPERNN_EXPORT double rand_double(double max)
Returns a pseudo-random double.
Definition: utils.cpp:36
double delta
Last local error gradient.
Definition: neuron.hpp:145
unsigned n_input
Last computed number of neurons in the input layer that aren't biases, computed by run()...
Definition: network.hpp:303
void init_weights(double min=-0.5, double max=0.5)
Initializes the weights with pseudo-random numbers.
Definition: network.cpp:327
void connect(unsigned from_layer, unsigned to_layer)
Connects all the neurons from a layer to all the neurons of another layer.
Definition: network.cpp:449
unsigned calc_num_neurons() const
Calculates the current number of neurons.
Definition: network.cpp:502
const Row & run(const Row &in, bool calc_error=false)
Propagates an input in the network.
Definition: network.cpp:176
unsigned size() const
Returns the number of synaptic connections.
Definition: neuron.hpp:111
double calc_class_higher(const Data &data)
Calculates the classification rate of the network related to a data.
Definition: network.cpp:299
ActFuncType act_func
Used activation function.
Definition: neuron.hpp:148
Artificial neural network structure that supports arbitrary feedforward topologies, like multilayer perceptrons and fully connected cascade networks.
Definition: network.hpp:78
void set_activation(ActFuncType type, double s=1)
Sets the activation function for all the neurons currently in the layer.
Definition: network.cpp:54
double err
Last error (desired - actual).
Definition: neuron.hpp:142
const Layer & operator[](unsigned l) const
Returns a const reference to a layer.
Definition: network.cpp:525
unsigned to_neuron
Position of the target neuron in its layer.
Definition: neuron.hpp:64
The exception can be identified by the type() method.
Definition: utils.hpp:69
ActFuncType
Activation functions built-in in the library.
void connect(unsigned to_layer, unsigned to_neuron)
Connects all the neurons of the layer to a neuron.
Definition: network.cpp:60
void load_file(const std::string &path)
Loads the network contents from a file.
Definition: network.cpp:381
unsigned to_layer
Layer where the target neuron is located.
Definition: neuron.hpp:61
double net
Last sum of the neuron inputs.
Definition: neuron.hpp:136
std::vector< double > Row
Data row.
Definition: data.hpp:90
Array of neurons.
Definition: network.hpp:36
unsigned size() const
Returns the number of layers.
Definition: network.cpp:535
Row last_output
Structure that holds the last output values.
Definition: network.hpp:295
Data used in training, validation and testing.
Definition: data.hpp:95
thrown when a file has invalid contents
Definition: utils.hpp:48
bool bias
Marks if it's a bias neuron.
Definition: neuron.hpp:157
thrown when the dimensions of a Row and the network does not match
Definition: utils.hpp:60
void clear_neurons(bool clear_delta, bool clear_run)
Clears the neuron state.
Definition: network.cpp:463
void save_file(const std::string &path) const
Saves the network contents to a file, for latter use.
Definition: network.cpp:347
virtual ~Network()
Definition: network.cpp:86