optimization_algorithm.cpp
1// OpenNN: Open Neural Networks Library
2// www.opennn.net
3//
4// O P T I M I Z A T I O N A L G O R I T H M C L A S S
5//
6// Artificial Intelligence Techniques SL
7// artelnics@artelnics.com
8
9#include "optimization_algorithm.h"
10
11namespace OpenNN
12{
13
16
18 : loss_index_pointer(nullptr)
19{
20 const int n = omp_get_max_threads();
21 non_blocking_thread_pool = new NonBlockingThreadPool(n);
22 thread_pool_device = new ThreadPoolDevice(non_blocking_thread_pool, n);
23
25}
26
27
30
32 : loss_index_pointer(new_loss_index_pointer)
33{
34 const int n = omp_get_max_threads();
35 non_blocking_thread_pool = new NonBlockingThreadPool(n);
36 thread_pool_device = new ThreadPoolDevice(non_blocking_thread_pool, n);
37
39}
40
41
43
45{
46 delete non_blocking_thread_pool;
47 delete thread_pool_device;
48}
49
50
53
55{
56#ifdef OPENNN_DEBUG
57
59 {
60 ostringstream buffer;
61
62 buffer << "OpenNN Exception: OptimizationAlgorithm class.\n"
63 << "LossIndex* get_loss_index_pointer() const method.\n"
64 << "Loss index pointer is nullptr.\n";
65
66 throw logic_error(buffer.str());
67 }
68
69#endif
70
71 return loss_index_pointer;
72}
73
74
76
78{
79 return hardware_use;
80}
81
82
84
85void OptimizationAlgorithm::set_hardware_use(const string& new_hardware_use)
86{
87 hardware_use = new_hardware_use;
88}
89
90
93
95{
97 {
98 return true;
99 }
100 else
101 {
102 return false;
103 }
104}
105
106
109
111{
112 return display;
113}
114
115
117
119{
120 return display_period;
121}
122
123
125
127{
128 return save_period;
129}
130
131
133
135{
137}
138
139
142
144{
145 loss_index_pointer = nullptr;
146
147 set_default();
148}
149
150
151void OptimizationAlgorithm::set_threads_number(const int& new_threads_number)
152{
153 if(non_blocking_thread_pool != nullptr) delete this->non_blocking_thread_pool;
154 if(thread_pool_device != nullptr) delete this->thread_pool_device;
155
156 non_blocking_thread_pool = new NonBlockingThreadPool(new_threads_number);
157 thread_pool_device = new ThreadPoolDevice(non_blocking_thread_pool, new_threads_number);
158}
159
160
163
165{
166 loss_index_pointer = new_loss_index_pointer;
167}
168
169
174
175void OptimizationAlgorithm::set_display(const bool& new_display)
176{
177 display = new_display;
178}
179
180
184
185void OptimizationAlgorithm::set_display_period(const Index& new_display_period)
186{
187#ifdef OPENNN_DEBUG
188
189 if(new_display_period <= 0)
190 {
191 ostringstream buffer;
192
193 buffer << "OpenNN Exception: ConjugateGradient class.\n"
194 << "void set_display_period(const Index&) method.\n"
195 << "Display period must be greater than 0.\n";
196
197 throw logic_error(buffer.str());
198 }
199
200#endif
201
202 display_period = new_display_period;
203}
204
205
209
210void OptimizationAlgorithm::set_save_period(const Index& new_save_period)
211{
212#ifdef OPENNN_DEBUG
213
214 if(new_save_period <= 0)
215 {
216 ostringstream buffer;
217
218 buffer << "OpenNN Exception: ConjugateGradient class.\n"
219 << "void set_save_period(const Index&) method.\n"
220 << "Save period must be greater than 0.\n";
221
222 throw logic_error(buffer.str());
223 }
224
225#endif
226
227 save_period = new_save_period;
228}
229
230
234
235void OptimizationAlgorithm::set_neural_network_file_name(const string& new_neural_network_file_name)
236{
237 neural_network_file_name = new_neural_network_file_name;
238}
239
240
242
244{
245 display = true;
246
247 display_period = 10;
248
249 save_period = UINT_MAX;
250
251 neural_network_file_name = "neural_network.xml";
252}
253
254
259
261{
262#ifdef OPENNN_DEBUG
263
264 ostringstream buffer;
265
267 {
268 buffer << "OpenNN Exception: OptimizationAlgorithm class.\n"
269 << "void check() const method.\n"
270 << "Pointer to loss index is nullptr.\n";
271
272 throw logic_error(buffer.str());
273 }
274
275 const NeuralNetwork* neural_network_pointer = loss_index_pointer->get_neural_network_pointer();
276
277 if(neural_network_pointer == nullptr)
278 {
279 buffer << "OpenNN Exception: OptimizationAlgorithm class.\n"
280 << "void check() const method.\n"
281 << "Pointer to neural network is nullptr.\n";
282
283 throw logic_error(buffer.str());
284 }
285
286#endif
287}
288
289
292
294{
295 ostringstream buffer;
296
297 file_stream.OpenElement("OptimizationAlgorithm");
298
299 // Display
300
301 file_stream.OpenElement("Display");
302
303 buffer.str("");
304 buffer << display;
305
306 file_stream.PushText(buffer.str().c_str());
307
308 file_stream.CloseElement();
309
310 file_stream.CloseElement();
311}
312
313
316
318{
319 const tinyxml2::XMLElement* root_element = document.FirstChildElement("OptimizationAlgorithm");
320
321 if(!root_element)
322 {
323 ostringstream buffer;
324
325 buffer << "OpenNN Exception: OptimizationAlgorithm class.\n"
326 << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
327 << "Optimization algorithm element is nullptr.\n";
328
329 throw logic_error(buffer.str());
330 }
331
332 // Display
333 {
334 const tinyxml2::XMLElement* display_element = root_element->FirstChildElement("Display");
335
336 if(display_element)
337 {
338 const string new_display_string = display_element->GetText();
339
340 try
341 {
342 set_display(new_display_string != "0");
343 }
344 catch(const logic_error& e)
345 {
346 cerr << e.what() << endl;
347 }
348 }
349 }
350}
351
352
355
357{
358 return Tensor<string, 2>();
359}
360
361
363
365{
366}
367
368
371
372void OptimizationAlgorithm::save(const string& file_name) const
373{
374 FILE * file = fopen(file_name.c_str(), "w");
375
376 tinyxml2::XMLPrinter printer(file);
377
378 write_XML(printer);
379
380 fclose(file);
381}
382
383
387
388void OptimizationAlgorithm::load(const string& file_name)
389{
390 set_default();
391
392 tinyxml2::XMLDocument document;
393
394 if(document.LoadFile(file_name.c_str()))
395 {
396 ostringstream buffer;
397
398 buffer << "OpenNN Exception: OptimizationAlgorithm class.\n"
399 << "void load(const string&) method.\n"
400 << "Cannot load XML file " << file_name << ".\n";
401
402 throw logic_error(buffer.str());
403 }
404
405 from_XML(document);
406}
407
408
410
412{
413 switch(stopping_condition)
414 {
415 case OptimizationAlgorithm::StoppingCondition::MinimumLossDecrease:
416 return "Minimum loss decrease";
417
418 case OptimizationAlgorithm::StoppingCondition::LossGoal:
419 return "Loss goal";
420
421 case OptimizationAlgorithm::StoppingCondition::MaximumSelectionErrorIncreases:
422 return "Maximum selection error increases";
423
424 case OptimizationAlgorithm::StoppingCondition::MaximumEpochsNumber:
425 return "Maximum number of epochs";
426
427 case OptimizationAlgorithm::StoppingCondition::MaximumTime:
428 return "Maximum training time";
429 }
430
431 return string();
432}
433
434
437
439{
440 if(training_error_history.size() == 0)
441 {
442 training_error_history.resize(new_size);
443
444 return;
445 }
446
447 const Tensor<type, 1> old_training_error_history = training_error_history;
448
449 training_error_history.resize(new_size);
450
451 for(Index i = 0; i < new_size; i++)
452 {
453 training_error_history(i) = old_training_error_history(i);
454 }
455}
456
457
460
462{
463 if(selection_error_history.size() == 0)
464 {
465 selection_error_history.resize(new_size);
466
467 return;
468 }
469
470 const Tensor<type, 1> old_selection_error_history = selection_error_history;
471
472 selection_error_history.resize(new_size);
473
474 for(Index i = 0; i < new_size; i++)
475 {
476 selection_error_history(i) = old_selection_error_history(i);
477 }
478}
479
480
482
483const string OptimizationAlgorithm::write_time(const type& time) const
484{
485
486#ifdef OPENNN_DEBUG
487
488 if(time > static_cast<type>(3600e5))
489 {
490 ostringstream buffer;
491
492 buffer << "OpenNN Exception: OptimizationAlgorithm class.\n"
493 << "const string write_time(const type& time) const method.\n"
494 << "Time must be lower than 10e5 seconds.\n";
495
496 throw logic_error(buffer.str());
497 }
498
499 if(time < static_cast<type>(0))
500 {
501 ostringstream buffer;
502
503 buffer << "OpenNN Exception: OptimizationAlgorithm class.\n"
504 << "const string write_time(const type& time) const method.\n"
505 << "Time must be greater than 0.\n";
506
507 throw logic_error(buffer.str());
508 }
509#endif
510
511 const int hours = static_cast<int>(time) / 3600;
512 int seconds = static_cast<int>(time) % 3600;
513 const int minutes = seconds / 60;
514 seconds = seconds % 60;
515
516 ostringstream elapsed_time;
517
518 elapsed_time << setfill('0') << setw(2) << hours << ":"
519 << setfill('0') << setw(2) << minutes << ":"
520 << setfill('0') << setw(2) << seconds;
521
522 return elapsed_time.str();
523}
524
525
527
528void TrainingResults::save(const string&) const
529{
530
531}
532
533
534Tensor<string, 2> TrainingResults::write_final_results(const Index& precision) const
535{
536 ostringstream buffer;
537
538 Tensor<string, 2> final_results(6, 2);
539
540 final_results(0,0) = "Training error";
541 final_results(1,0) = "Selection error";
542 final_results(2,0) = "Epochs number";
543 final_results(3,0) = "Elapsed time";
544 final_results(4,0) = "Stopping criterion";
545
546 const Index size = training_error_history.size();
547
548 if(size == 0)
549 {
550 final_results(0,1) = "NA";
551 final_results(1,1) = "NA";
552 final_results(2,1) = "NA";
553 final_results(3,1) = "NA";
554 final_results(4,1) = "NA";
555
556 return final_results;
557 }
558
559 // Final training error
560
561 buffer.str("");
562 buffer << setprecision(precision) << training_error_history(size-1);
563
564 final_results(0,1) = buffer.str();
565
566 // Final selection error
567
568 buffer.str("");
569
570 selection_error_history.size() == 0
571 ? buffer << "NAN"
572 : buffer << setprecision(precision) << selection_error_history(size-1);
573
574 final_results(1,1) = buffer.str();
575
576 // Epochs number
577
578 buffer.str("");
579 buffer << training_error_history.size()-1;
580
581 final_results(2,1) = buffer.str();
582
583 // Elapsed time
584
585 buffer.str("");
586 buffer << setprecision(precision) << elapsed_time;
587
588 final_results(3,1) = buffer.str();
589
590 // Stopping criteria
591
592 final_results(4,1) = write_stopping_condition();
593
594 return final_results;
595}
596
597}
598
599
600// OpenNN: Open Neural Networks Library.
601// Copyright(C) 2005-2021 Artificial Intelligence Techniques, SL.
602//
603// This library is free software; you can redistribute it and/or
604// modify it under the terms of the GNU Lesser General Public
605// License as published by the Free Software Foundation; either
606// version 2.1 of the License, or any later version.
607//
608// This library is distributed in the hope that it will be useful,
609// but WITHOUT ANY WARRANTY; without even the implied warranty of
610// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
611// Lesser General Public License for more details.
612
613// You should have received a copy of the GNU Lesser General Public
614// License along with this library; if not, write to the Free Software
615// Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
This abstract class represents the concept of loss index composed of an error term and a regularization term.
Definition: loss_index.h:48
NeuralNetwork * get_neural_network_pointer() const
Returns a pointer to the neural network object associated to the error term.
Definition: loss_index.h:70
string neural_network_file_name
Path where the neural network is saved.
virtual void set_loss_index_pointer(LossIndex *)
void set_hardware_use(const string &)
Set hardware to use. Default: Multi-core.
string get_hardware_use() const
Hardware use.
virtual void from_XML(const tinyxml2::XMLDocument &)
virtual void set_default()
Sets the members of the optimization algorithm object to their default values.
void set_neural_network_file_name(const string &)
LossIndex * loss_index_pointer
Pointer to a loss index for a neural network object.
bool display
Display messages to screen.
virtual Tensor< string, 2 > to_string_matrix() const
const Index & get_display_period() const
Returns the number of iterations between the training showing progress.
const string write_time(const type &) const
Writes the time from seconds in format HH:mm:ss.
Index save_period
Number of iterations between the training saving progress.
const string & get_neural_network_file_name() const
Returns the file name where the neural network will be saved.
const Index & get_save_period() const
Returns the number of iterations between the training saving progress.
virtual void print() const
Prints to the screen the XML-type representation of the optimization algorithm object.
virtual void set_display(const bool &)
virtual void write_XML(tinyxml2::XMLPrinter &) const
virtual ~OptimizationAlgorithm()
Destructor.
Index display_period
Number of iterations between the training showing progress.
void PushText(const char *text, bool cdata=false)
Add a text node.
Definition: tinyxml2.cpp:2878
virtual void CloseElement(bool compactMode=false)
If streaming, close the Element.
Definition: tinyxml2.cpp:2834
Tensor< type, 1 > selection_error_history
History of the selection error over the training iterations.
Tensor< string, 2 > write_final_results(const Index &=3) const
Writes final results of the training.
void resize_training_error_history(const Index &)
Resizes the training error history keeping the values.
void save(const string &) const
Returns a string representation of the results structure.
OptimizationAlgorithm::StoppingCondition stopping_condition
Stopping condition of the algorithm.
void resize_selection_error_history(const Index &)
Resizes the selection error history keeping the values.
Tensor< type, 1 > training_error_history
History of the loss function loss over the training iterations.
string elapsed_time
Elapsed time of the training process.
string write_stopping_condition() const
Return a string with the stopping condition of the Results.