20 #ifndef SUPERNN_TRAINING_HPP
21 #define SUPERNN_TRAINING_HPP
47 virtual void prepare(
Network &net);
59 virtual unsigned train(
Network &net,
const Data &data,
double dmse = 0,
60 unsigned max_epochs = 1000) = 0;
79 virtual double delta(
Network &net,
unsigned l,
unsigned n);
89 virtual void derror_acc(
Network &net);
96 virtual void clear_derror_acc(
Network &net);
107 virtual void check(
const Network& net,
const Data& data)
const;
116 virtual ~ImplBackprop();
118 virtual unsigned train(
Network &net,
const Data &data,
double dmse,
unsigned max_epochs) = 0;
144 virtual void update_weights(
Network &net,
double factor = 1);
153 virtual void update_eta(
double mse,
double last_mse);
163 virtual ~Incremental();
164 virtual unsigned train(
Network &net,
const Data &data,
double dmse = 0,
unsigned max_epochs = 1000);
175 virtual unsigned train(
Network &net,
const Data &data,
double dmse = 0,
unsigned max_epochs = 1000);
189 virtual void prepare(
Network &net);
190 virtual unsigned train(
Network &net,
const Data &data,
double dmse = 0,
unsigned max_epochs = 1000);
213 virtual void update_weights(
Network &net);
225 double delta(
Network &net,
unsigned l,
unsigned n);
240 unsigned train(
Network &net,
const Data &data,
double dmse = 0,
unsigned max_epochs = 100);
254 double delta(
Network &net,
unsigned l,
unsigned n,
unsigned m);
261 void get_weights(
const Network &net);
268 void set_weights(
Network &net)
const;
276 void calc_jacobian_transp_line(
Network &net,
unsigned m);
284 void update_hessian_gradient(
double err);
290 void update_weights(
Network &net);
300 bool update_mu(
double mse,
double last_mse);
331 unsigned local_iters;
357 unsigned k = 10,
double dmse = 0,
unsigned max_epochs = 1000);
376 const SuperNN::Data &validation,
unsigned step_size = 1,
unsigned max_stuck = 20,
unsigned max_epochs = 1000);
double eta_df
Learning rate decrease factor (must be <= 1).
Modified improved resilient backpropagation algorithm.
Abstract class that provides the calculation of the error derivatives and the error accumulation.
Incremental backpropagation.
SUPERNN_EXPORT unsigned early_stopping(TrainingAlgorithm &algo, Network &net, const Data &training, const SuperNN::Data &validation, unsigned step_size=1, unsigned max_stuck=20, unsigned max_epochs=1000)
Trains an artificial neural network by using early stopping in order to avoid over-fitting.
double delta_zero
Initial weight change.
Improved resilient backpropagation algorithm.
double eta_max
Maximum learning rate.
double delta_if
Weight change increase factor.
double delta_df
Weight change decrease factor.
double eta_min
Minimum learning rate.
Neuron by Neuron algorithm.
Base class for the standard backpropagation algorithm.
double eta_if
Learning rate increase factor (must be >= 1).
double eta
Initial learning rate.
Artificial neural network structure that supports arbitrary feedforward topologies, like multilayer perceptrons and fully connected cascade networks.
Data used in training, validation and testing.
double delta_max
Maximum weight change.
SUPERNN_EXPORT double k_fold_error(TrainingAlgorithm &algo, const SuperNN::Network &net, const Data &data, unsigned k=10, double dmse=0, unsigned max_epochs=1000)
Estimates the performance of a neural network for an independent data set by using k-fold cross validation.
double delta_min
Minimum weight change.