9#include "optimization_algorithm.h"
18 : loss_index_pointer(nullptr)
20 const int n = omp_get_max_threads();
21 non_blocking_thread_pool =
new NonBlockingThreadPool(n);
22 thread_pool_device =
new ThreadPoolDevice(non_blocking_thread_pool, n);
32 : loss_index_pointer(new_loss_index_pointer)
34 const int n = omp_get_max_threads();
35 non_blocking_thread_pool =
new NonBlockingThreadPool(n);
36 thread_pool_device =
new ThreadPoolDevice(non_blocking_thread_pool, n);
46 delete non_blocking_thread_pool;
47 delete thread_pool_device;
62 buffer <<
"OpenNN Exception: OptimizationAlgorithm class.\n"
63 <<
"LossIndex* get_loss_index_pointer() const method.\n"
64 <<
"Loss index pointer is nullptr.\n";
66 throw logic_error(buffer.str());
151void OptimizationAlgorithm::set_threads_number(
const int& new_threads_number)
153 if(non_blocking_thread_pool !=
nullptr)
delete this->non_blocking_thread_pool;
154 if(thread_pool_device !=
nullptr)
delete this->thread_pool_device;
156 non_blocking_thread_pool =
new NonBlockingThreadPool(new_threads_number);
157 thread_pool_device =
new ThreadPoolDevice(non_blocking_thread_pool, new_threads_number);
189 if(new_display_period <= 0)
191 ostringstream buffer;
193 buffer <<
"OpenNN Exception: ConjugateGradient class.\n"
194 <<
"void set_display_period(const Index&) method.\n"
195 <<
"Display period must be greater than 0.\n";
197 throw logic_error(buffer.str());
214 if(new_save_period <= 0)
216 ostringstream buffer;
218 buffer <<
"OpenNN Exception: ConjugateGradient class.\n"
219 <<
"void set_save_period(const Index&) method.\n"
220 <<
"Save period must be greater than 0.\n";
222 throw logic_error(buffer.str());
264 ostringstream buffer;
268 buffer <<
"OpenNN Exception: OptimizationAlgorithm class.\n"
269 <<
"void check() const method.\n"
270 <<
"Pointer to loss index is nullptr.\n";
272 throw logic_error(buffer.str());
277 if(neural_network_pointer ==
nullptr)
279 buffer <<
"OpenNN Exception: OptimizationAlgorithm class.\n"
280 <<
"void check() const method.\n"
281 <<
"Pointer to neural network is nullptr.\n";
283 throw logic_error(buffer.str());
// write_XML fragment: serializes this object as an "OptimizationAlgorithm"
// XML element containing a "Display" child via a tinyxml2::XMLPrinter.
// NOTE(review): this is an incomplete extraction — the statements that fill
// `buffer` (presumably with the `display` flag) and the matching
// CloseElement() calls are not visible here; confirm against the full file.
295 ostringstream buffer;
297 file_stream.OpenElement(
"OptimizationAlgorithm");
301 file_stream.OpenElement(
"Display");
306 file_stream.
PushText(buffer.str().c_str());
// from_XML fragment: loads the object's members from a tinyxml2 document.
// Throws logic_error when the root "OptimizationAlgorithm" element is
// missing; otherwise reads the "Display" element's text and applies it
// inside a try/catch that reports parse errors to stderr without rethrowing.
// NOTE(review): incomplete extraction — the element lookups, the `try`
// keyword and the call that consumes new_display_string are not visible
// here; confirm against the full file.
323 ostringstream buffer;
325 buffer <<
"OpenNN Exception: OptimizationAlgorithm class.\n"
326 <<
"void from_XML(const tinyxml2::XMLDocument&) method.\n"
327 <<
"Optimization algorithm element is nullptr.\n";
329 throw logic_error(buffer.str());
338 const string new_display_string = display_element->GetText();
344 catch(
const logic_error& e)
346 cerr << e.what() << endl;
// to_string_matrix fragment: the base-class implementation returns an empty
// 2-D string tensor; derived algorithms override it with their settings.
358 return Tensor<string, 2>();
// save fragment: opens the target file for writing with C stdio so the XML
// printer can stream into it.
// NOTE(review): incomplete extraction — the null check on `file`, the XML
// serialization and the matching fclose(file) are not visible here; verify
// in the full file that the handle is closed on every path (leak risk).
374 FILE * file = fopen(file_name.c_str(),
"w");
// load fragment: parses the XML file at file_name and throws logic_error if
// tinyxml2 reports a load failure (LoadFile returns nonzero on error).
// NOTE(review): incomplete extraction — the tinyxml2::XMLDocument
// declaration and the from_XML(document) call that follows are not visible
// here; confirm against the full file.
394 if(document.LoadFile(file_name.c_str()))
396 ostringstream buffer;
398 buffer <<
"OpenNN Exception: OptimizationAlgorithm class.\n"
399 <<
"void load(const string&) method.\n"
400 <<
"Cannot load XML file " << file_name <<
".\n";
402 throw logic_error(buffer.str());
// write_stopping_condition fragment: maps the StoppingCondition enum of the
// results structure to a human-readable label.
// NOTE(review): incomplete extraction — the `return "Loss goal";` for the
// LossGoal case, the switch header and the function's fallback return are
// stripped here; confirm against the full file that every enumerator
// (including LossGoal) returns a string.
415 case OptimizationAlgorithm::StoppingCondition::MinimumLossDecrease:
416 return "Minimum loss decrease";
418 case OptimizationAlgorithm::StoppingCondition::LossGoal:
421 case OptimizationAlgorithm::StoppingCondition::MaximumSelectionErrorIncreases:
422 return "Maximum selection error increases";
424 case OptimizationAlgorithm::StoppingCondition::MaximumEpochsNumber:
425 return "Maximum number of epochs";
427 case OptimizationAlgorithm::StoppingCondition::MaximumTime:
428 return "Maximum training time";
// resize_training_error_history fragment: copies the first new_size existing
// values into the resized history tensor (body of the copy loop not visible).
451 for(Index i = 0; i < new_size; i++)
// resize_selection_error_history fragment: copies the first new_size existing
// values into the resized history tensor (body of the copy loop not visible).
474 for(Index i = 0; i < new_size; i++)
488 if(time >
static_cast<type
>(3600e5))
490 ostringstream buffer;
492 buffer <<
"OpenNN Exception: OptimizationAlgorithm class.\n"
493 <<
"const string write_time(const type& time) const method.\n"
494 <<
"Time must be lower than 10e5 seconds.\n";
496 throw logic_error(buffer.str());
499 if(time <
static_cast<type
>(0))
501 ostringstream buffer;
503 buffer <<
"OpenNN Exception: OptimizationAlgorithm class.\n"
504 <<
"const string write_time(const type& time) const method.\n"
505 <<
"Time must be greater than 0.\n";
507 throw logic_error(buffer.str());
511 const int hours =
static_cast<int>(time) / 3600;
512 int seconds =
static_cast<int>(time) % 3600;
513 const int minutes = seconds / 60;
514 seconds = seconds % 60;
516 ostringstream elapsed_time;
518 elapsed_time << setfill(
'0') << setw(2) << hours <<
":"
519 << setfill(
'0') << setw(2) << minutes <<
":"
520 << setfill(
'0') << setw(2) << seconds;
522 return elapsed_time.str();
// write_final_results fragment: builds a 6x2 label/value matrix summarizing
// training (training error, selection error, epochs, elapsed time, stopping
// criterion).  When training produced no results, every value column is "NA"
// and the matrix is returned early; otherwise the value column is filled
// from `buffer` (the statements that format each value into `buffer` between
// the assignments are not visible in this extraction).
// NOTE(review): the tensor has 6 rows but only rows 0-4 are visibly
// assigned — confirm in the full file whether row 5 is set or the size
// should be 5.
536 ostringstream buffer;
538 Tensor<string, 2> final_results(6, 2);
540 final_results(0,0) =
"Training error";
541 final_results(1,0) =
"Selection error";
542 final_results(2,0) =
"Epochs number";
543 final_results(3,0) =
"Elapsed time";
544 final_results(4,0) =
"Stopping criterion";
550 final_results(0,1) =
"NA";
551 final_results(1,1) =
"NA";
552 final_results(2,1) =
"NA";
553 final_results(3,1) =
"NA";
554 final_results(4,1) =
"NA";
556 return final_results;
564 final_results(0,1) = buffer.str();
574 final_results(1,1) = buffer.str();
581 final_results(2,1) = buffer.str();
588 final_results(3,1) = buffer.str();
594 return final_results;
This abstract class represents the concept of loss index composed of an error term and a regularizati...
NeuralNetwork * get_neural_network_pointer() const
Returns a pointer to the neural network object associated to the error term.
void set_display_period(const Index &)
string neural_network_file_name
Path where the neural network is saved.
virtual void set_loss_index_pointer(LossIndex *)
const bool & get_display() const
void set_hardware_use(const string &)
Set hardware to use. Default: Multi-core.
string get_hardware_use() const
Returns the hardware to use.
virtual void from_XML(const tinyxml2::XMLDocument &)
virtual void set_default()
Sets the members of the optimization algorithm object to their default values.
void set_neural_network_file_name(const string &)
LossIndex * loss_index_pointer
Pointer to a loss index for a neural network object.
virtual void check() const
void load(const string &)
bool display
Display messages to screen.
virtual Tensor< string, 2 > to_string_matrix() const
void set_save_period(const Index &)
const Index & get_display_period() const
Returns the number of iterations between the training showing progress.
const string write_time(const type &) const
Writes the time from seconds in format HH:mm:ss.
void save(const string &) const
Index save_period
Number of iterations between the training saving progress.
bool has_loss_index() const
const string & get_neural_network_file_name() const
Returns the file name where the neural network will be saved.
const Index & get_save_period() const
Returns the number of iterations between the training saving progress.
string hardware_use
Hardware use.
virtual void print() const
Prints to the screen the XML-type representation of the optimization algorithm object.
virtual void set_display(const bool &)
virtual void write_XML(tinyxml2::XMLPrinter &) const
virtual ~OptimizationAlgorithm()
Destructor.
LossIndex * get_loss_index_pointer() const
Index display_period
Number of iterations between the training showing progress.
void PushText(const char *text, bool cdata=false)
Add a text node.
virtual void CloseElement(bool compactMode=false)
If streaming, close the Element.
Tensor< type, 1 > selection_error_history
History of the selection error over the training iterations.
Tensor< string, 2 > write_final_results(const Index &=3) const
Writes final results of the training.
void resize_training_error_history(const Index &)
Resizes the training error history keeping the values.
void save(const string &) const
Saves the results structure to a data file.
OptimizationAlgorithm::StoppingCondition stopping_condition
Stopping condition of the algorithm.
void resize_selection_error_history(const Index &)
Resizes the selection error history keeping the values.
Tensor< type, 1 > training_error_history
History of the loss function value over the training iterations.
string elapsed_time
Elapsed time of the training process.
string write_stopping_condition() const
Return a string with the stopping condition of the Results.