9#ifndef LEVENBERGMARQUARDTALGORITHM_H
10#define LEVENBERGMARQUARDTALGORITHM_H
27#include "tensor_utilities.h"
28#include "optimization_algorithm.h"
32#include "../eigen/Eigen/Dense"
37struct LevenbergMarquardtAlgorithmData;
181 set(new_Levenberg_Marquardt_method_pointer);
184 virtual ~LevenbergMarquardtAlgorithmData() {}
186 void set(LevenbergMarquardtAlgorithm* new_Levenberg_Marquardt_method_pointer)
188 Levenberg_Marquardt_algorithm = new_Levenberg_Marquardt_method_pointer;
198 old_parameters.resize(parameters_number);
200 parameters_difference.resize(parameters_number);
202 potential_parameters.resize(parameters_number);
203 parameters_increment.resize(parameters_number);
206 LevenbergMarquardtAlgorithm* Levenberg_Marquardt_algorithm =
nullptr;
210 Tensor<type, 1> old_parameters;
211 Tensor<type, 1> parameters_difference;
213 Tensor<type, 1> parameters_increment;
217 type old_loss = type(0);
The Levenberg-Marquardt Algorithm will always compute the approximate Hessian matrix, which has dimensions equal to the number of parameters by the number of parameters.
TrainingResults perform_training()
virtual ~LevenbergMarquardtAlgorithm()
void set_maximum_selection_failures(const Index &)
const type & get_minimum_damping_parameter() const
Returns the minimum damping parameter allowed in the algorithm.
const type & get_maximum_time() const
Returns the maximum training time.
const type & get_loss_goal() const
void from_XML(const tinyxml2::XMLDocument &)
const Index & get_maximum_epochs_number() const
Returns the maximum number of iterations for training.
Tensor< string, 2 > to_string_matrix() const
Writes the most representative attributes as a matrix of strings.
type minimum_loss_decrease
Minimum loss improvement between two successive iterations. It is used as a stopping criterion.
void update_parameters(const DataSetBatch &, NeuralNetworkForwardPropagation &, LossIndexBackPropagationLM &, LevenbergMarquardtAlgorithmData &)
Performs one Levenberg-Marquardt parameter update step on the neural network.
string write_optimization_algorithm_type() const
Writes the optimization algorithm type.
type damping_parameter
Initial Levenberg-Marquardt parameter.
void set_maximum_time(const type &)
void set_loss_goal(const type &)
void set_damping_parameter(const type &)
type maximum_time
Maximum training time. It is used as a stopping criterion.
type damping_parameter_factor
Damping parameter increase/decrease factor.
void set_maximum_epochs_number(const Index &)
const type & get_damping_parameter_factor() const
Returns the damping parameter factor (beta in the User's Guide) for the Hessian approximation.
void set_minimum_loss_decrease(const type &)
LevenbergMarquardtAlgorithm()
type minimum_damping_parameter
Minimum Levenberg-Marquardt parameter.
void set_minimum_damping_parameter(const type &)
type training_loss_goal
Goal value for the loss. It is used as a stopping criterion.
const type & get_maximum_damping_parameter() const
Returns the maximum damping parameter allowed in the algorithm.
Index maximum_epochs_number
Maximum number of epochs to perform training. It is used as a stopping criterion.
void write_XML(tinyxml2::XMLPrinter &) const
const type & get_damping_parameter() const
Returns the damping parameter for the Hessian approximation.
void set_maximum_damping_parameter(const type &)
const Index & get_maximum_selection_failures() const
Returns the maximum number of selection failures during the training process.
type maximum_damping_parameter
Maximum Levenberg-Marquardt parameter.
Index maximum_selection_failures
void set_damping_parameter_factor(const type &)
const type & get_minimum_loss_decrease() const
Returns the minimum loss improvement during training.
This abstract class represents the concept of a loss index composed of an error term and a regularization term.
NeuralNetwork * get_neural_network_pointer() const
Returns a pointer to the neural network object associated to the error term.
Index get_parameters_number() const
LossIndex * get_loss_index_pointer() const
LevenbergMarquardtAlgorithmData()
Default constructor.
A loss index composed of several terms; this structure represents the first-order information for this function.
This structure contains the optimization algorithm results.