SuperNN  1.0.0
activation.hpp
1 /*
2  This file is part of SuperNN.
3 
4  SuperNN is free software: you can redistribute it and/or modify
5  it under the terms of the GNU Lesser General Public License as published by
6  the Free Software Foundation, either version 3 of the License, or
7  (at your option) any later version.
8 
9  SuperNN is distributed in the hope that it will be useful,
10  but WITHOUT ANY WARRANTY; without even the implied warranty of
11  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12  GNU Lesser General Public License for more details.
13 
14  You should have received a copy of the GNU Lesser General Public License
15  along with SuperNN. If not, see <http://www.gnu.org/licenses/>.
16 
17  Copyright (C) 2010 - 2015 Lucas Hermann Negri
18 */
19 
20 #ifndef SUPERNN_ACTIVATION_HPP
21 #define SUPERNN_ACTIVATION_HPP
22 
23 #include <cmath>
24 #include "neuron.hpp"
25 #include "utils.hpp"
26 
27 namespace SuperNN
28 {
37 struct Sigmoid
38 {
39  static inline double activation(const Neuron &neuron)
40  {
41  return 1 / (1 + std::exp(-2 * neuron.steep * neuron.net));
42  }
43 
44  static inline double derivative(const Neuron &neuron)
45  {
46  double out = Utils::limit(0.01, 0.99, neuron.out);
47  return 2 * neuron.steep * out * (1 - out);
48  }
49 };
50 
56 {
57  static inline double activation(const Neuron &neuron)
58  {
59  return 2 / (1 + std::exp(-2 * neuron.steep * neuron.net)) - 1.0;
60  }
61 
62  static inline double derivative(const Neuron &neuron)
63  {
64  double out = Utils::limit(-0.98, 0.98, neuron.out);
65  return neuron.steep * (1 - (out * out));
66  }
67 };
68 
73 struct Elliot
74 {
75  static inline double activation(const Neuron &neuron)
76  {
77  double t = neuron.net * neuron.steep;
78  return (t / 2) / (1 + std::abs(t)) + 0.5;
79  }
80 
81  static inline double derivative(const Neuron &neuron)
82  {
83  double t = 1 + fabs(neuron.net * neuron.steep);
84  return neuron.steep / (2 * t * t);
85  }
86 };
87 
93 {
94  static inline double activation(const Neuron &neuron)
95  {
96  double t = neuron.net * neuron.steep;
97  return t / (1 + std::abs(t));
98  }
99 
100  static inline double derivative(const Neuron &neuron)
101  {
102  double t = 1 + fabs(neuron.net * neuron.steep);
103  return neuron.steep / (t * t);
104  }
105 };
106 
111 struct Gaussian
112 {
113  static inline double activation(const Neuron &neuron)
114  {
115  double a = neuron.net * neuron.steep;
116  return std::exp(-a * a);
117  }
118 
119  static inline double derivative(const Neuron &neuron)
120  {
121  return -2 * neuron.net * neuron.out * neuron.steep * neuron.steep;
122  }
123 };
124 
130 {
131  static inline double activation(const Neuron &neuron)
132  {
133  double a = neuron.net * neuron.steep;
134  return 2 * exp(-a * a) - 1;
135  }
136 
137  static inline double derivative(const Neuron &neuron)
138  {
139  return -2 * neuron.net * (neuron.out + 1) * neuron.steep * neuron.steep;
140  }
141 };
142 
147 struct Linear
148 {
149  static inline double activation(const Neuron &neuron)
150  {
151  return neuron.steep * neuron.net;
152  }
153 
154  static inline double derivative(const Neuron &neuron)
155  {
156  return neuron.steep;
157  }
158 };
159 
/// Sign (step) function: +1 for net >= 0, -1 otherwise.
/// Not differentiable, so it cannot be used for gradient-based training.
struct Sign
{
    /// g(net) = net >= 0 ? 1 : -1 (steepness is ignored)
    static inline double activation(const Neuron &neuron)
    {
        return neuron.net >= 0 ? 1 : -1;
    }

    static inline double derivative(const Neuron &neuron)
    {
        (void)neuron;
        // NOTE(review): the statement that followed here (original line 174)
        // was lost in extraction. Cross-references indicate it threw the
        // "non-differentiable activation function" exception declared in
        // utils.hpp — restore it; as shown, control falls off the end of a
        // non-void function, which is undefined behavior. TODO confirm the
        // exact exception identifier against utils.hpp.
    }
};
177 
179 struct ActFunc
180 {
187  static inline double activation(const Neuron &neuron)
188  {
189  switch(neuron.act_func)
190  {
191  case ACT_SIGMOID:
192  return Sigmoid::activation(neuron);
193 
195  return SigmoidSymmetric::activation(neuron);
196 
197  case ACT_LINEAR:
198  return Linear::activation(neuron);
199 
200  case ACT_ELLIOT:
201  return Elliot::activation(neuron);
202 
204  return ElliotSymmetric::activation(neuron);
205 
206  case ACT_GAUSSIAN:
207  return Gaussian::activation(neuron);
208 
210  return GaussianSymmetric::activation(neuron);
211 
212  case ACT_SIGN:
213  return Sign::activation(neuron);
214  }
215 
216  // should not be reached
217  return 0.0;
218  }
219 
227  static inline double derivative(const Neuron &neuron)
228  {
229  switch(neuron.act_func)
230  {
231  case ACT_SIGMOID:
232  return Sigmoid::derivative(neuron);
233 
235  return SigmoidSymmetric::derivative(neuron);
236 
237  case ACT_LINEAR:
238  return Linear::derivative(neuron);
239 
240  case ACT_ELLIOT:
241  return Elliot::derivative(neuron);
242 
244  return ElliotSymmetric::derivative(neuron);
245 
246  case ACT_GAUSSIAN:
247  return Gaussian::derivative(neuron);
248 
250  return GaussianSymmetric::derivative(neuron);
251 
252  case ACT_SIGN:
253  return Sign::derivative(neuron);
254  }
255 
256  // should not be reached
257  return 0.0;
258  }
259 };
260 
261 }
262 
263 #endif
Neuron, that can contain connections to neurons in the next layers.
Definition: neuron.hpp:70
Activation function dispatcher.
Definition: activation.hpp:179
Elliot sigmoid-like function.
Definition: activation.hpp:73
Sigmoid symmetric activation function.
static double activation(const Neuron &neuron)
Definition: activation.hpp:166
Elliot sigmoid-like function (Symmetric).
Definition: activation.hpp:92
static double derivative(const Neuron &neuron)
Definition: activation.hpp:137
static double derivative(const Neuron &neuron)
Definition: activation.hpp:119
thrown when training with a non-differentiable activation function
Definition: utils.hpp:57
static double activation(const Neuron &neuron)
Definition: activation.hpp:39
double steep
Activation function steepness.
Definition: neuron.hpp:151
static double activation(const Neuron &neuron)
Calls the actual activation function.
Definition: activation.hpp:187
Sign function (net >= 0 ? 1 : -1).
Definition: activation.hpp:164
Linear activation function.
double out
Last output of the neuron ( g(net) )
Definition: neuron.hpp:139
Sigmoid-like activation function.
static double activation(const Neuron &neuron)
Definition: activation.hpp:57
static double derivative(const Neuron &neuron)
Definition: activation.hpp:154
Linear function.
Definition: activation.hpp:147
Gaussian symmetric function.
Definition: activation.hpp:129
Gaussian function.
Definition: activation.hpp:111
static double derivative(const Neuron &neuron)
Calls the actual derivative of the activation function, used to calculate the error gradient...
Definition: activation.hpp:227
static double activation(const Neuron &neuron)
Definition: activation.hpp:131
static double activation(const Neuron &neuron)
Definition: activation.hpp:94
Gaussian symmetric function.
static double derivative(const Neuron &neuron)
Definition: activation.hpp:100
ActFuncType act_func
Used activation function.
Definition: neuron.hpp:148
Sigmoid activation function.
static double activation(const Neuron &neuron)
Definition: activation.hpp:75
The exception can be identified by the type() method.
Definition: utils.hpp:69
Sigmoid symmetric function.
Definition: activation.hpp:55
Activation functions were not implemented in an OO way due to performance.
Definition: activation.hpp:37
static double derivative(const Neuron &neuron)
Definition: activation.hpp:44
double net
Last sum of the neuron inputs.
Definition: neuron.hpp:136
static double activation(const Neuron &neuron)
Definition: activation.hpp:113
Sigmoid-like activation function, symmetric version.
static double derivative(const Neuron &neuron)
Definition: activation.hpp:81
Gaussian activation function.
static double derivative(const Neuron &neuron)
Definition: activation.hpp:171
static double activation(const Neuron &neuron)
Definition: activation.hpp:149
static double derivative(const Neuron &neuron)
Definition: activation.hpp:62
double limit(double min, double max, double value)
Returns the value limited to a range.
Definition: utils.hpp:117