incremental_neurons.cpp
// OpenNN: Open Neural Networks Library
// www.opennn.net
//
// I N C R E M E N T A L   N E U R O N S   C L A S S
//
// Artificial Intelligence Techniques SL
// artelnics@artelnics.com

#include "incremental_neurons.h"

namespace OpenNN
{

/// Default constructor.

IncrementalNeurons::IncrementalNeurons()
    : NeuronsSelection()
{
    set_default();
}


/// Training strategy constructor.
/// @param new_training_strategy_pointer Pointer to a training strategy object.

IncrementalNeurons::IncrementalNeurons(TrainingStrategy* new_training_strategy_pointer)
    : NeuronsSelection(new_training_strategy_pointer)
{
    set_default();
}


/// XML constructor.
/// @param incremental_order_document TinyXML document containing the incremental order data.

IncrementalNeurons::IncrementalNeurons(const tinyxml2::XMLDocument& incremental_order_document)
    : NeuronsSelection(incremental_order_document)
{
    from_XML(incremental_order_document);
}


/// File constructor.
/// @param file_name Name of the XML file containing the incremental order data.

IncrementalNeurons::IncrementalNeurons(const string& file_name)
    : NeuronsSelection(file_name)
{
    load(file_name);
}


/// Destructor.

IncrementalNeurons::~IncrementalNeurons()
{
}


/// Returns the number of neurons added at each iteration of the incremental algorithm.

const size_t& IncrementalNeurons::get_step() const
{
    return step;
}


/// Returns the maximum number of selection failures in the incremental order selection algorithm.

const size_t& IncrementalNeurons::get_maximum_selection_failures() const
{
    return maximum_selection_failures;
}


/// Sets the members of the incremental order object to their default values.

void IncrementalNeurons::set_default()
{
    step = 1;

    maximum_selection_failures = 10;
}


/// Sets the number of neurons added at each iteration of the incremental algorithm.
/// @param new_step Number of neurons to add per iteration.

void IncrementalNeurons::set_step(const size_t& new_step)
{
#ifdef __OPENNN_DEBUG__

    if(new_step <= 0)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: IncrementalNeurons class.\n"
               << "void set_step(const size_t&) method.\n"
               << "New step (" << new_step << ") must be greater than 0.\n";

        throw logic_error(buffer.str());
    }

    if(new_step > maximum_order-minimum_order)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: IncrementalNeurons class.\n"
               << "void set_step(const size_t&) method.\n"
               << "New step must be less than the distance between maximum order and minimum order (" << maximum_order-minimum_order << ").\n";

        throw logic_error(buffer.str());
    }

#endif

    step = new_step;
}


/// Sets the maximum number of selection failures for the incremental order selection algorithm.
/// @param new_maximum_loss_failures Maximum number of iterations in which the selection error increases.

void IncrementalNeurons::set_maximum_selection_failures(const size_t& new_maximum_loss_failures)
{
#ifdef __OPENNN_DEBUG__

    if(new_maximum_loss_failures <= 0)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: IncrementalNeurons class.\n"
               << "void set_maximum_selection_failures(const size_t&) method.\n"
               << "Maximum selection failures must be greater than 0.\n";

        throw logic_error(buffer.str());
    }

#endif

    maximum_selection_failures = new_maximum_loss_failures;
}


/// Perform the neurons selection with the incremental method.

IncrementalNeurons::IncrementalNeuronsResults* IncrementalNeurons::perform_neurons_selection()
{
    IncrementalNeuronsResults* results = new IncrementalNeuronsResults();

    if(display)
    {
        cout << "Performing Incremental neurons selection..." << endl;
        cout.flush();
    }

    // Neural network stuff

    NeuralNetwork* neural_network = training_strategy_pointer->get_neural_network_pointer();

    const size_t trainable_layers_number = neural_network->get_trainable_layers_number();

    const Vector<Layer*> trainable_layers_pointers = neural_network->get_trainable_layers_pointers();

    // Loss index stuff

    double prev_selection_error = numeric_limits<double>::max();

    Vector<double> optimal_parameters;

    double optimum_training_loss = 0.0;
    double optimum_selection_error = 0.0;

    double current_training_loss = 0.0;
    double current_selection_error = 0.0;

    Vector<double> current_parameters;

    // Optimization algorithm stuff

    size_t optimal_neurons_number = 0;

    size_t neurons_number = minimum_order;
    size_t iterations = 0;
    size_t selection_failures = 0;

    bool end = false;

    time_t beginning_time, current_time;
    double elapsed_time = 0.0;

    time(&beginning_time);

    // Main loop

    for(size_t i = 0; i < maximum_order; i++)
    {
        // Calculate losses

        trainable_layers_pointers[trainable_layers_number-2]->set_neurons_number(neurons_number); // Fix
        trainable_layers_pointers[trainable_layers_number-1]->set_inputs_number(neurons_number); // Fix

        // Loss index stuff

        double optimum_selection_error_trial = numeric_limits<double>::max();
        double optimum_training_error_trial = numeric_limits<double>::max();
        Vector<double> optimum_parameters_trial;

        for(size_t i = 0; i < trials_number; i++)
        {
            neural_network->randomize_parameters_normal();

            const OptimizationAlgorithm::Results optimization_algorithm_results = training_strategy_pointer->perform_training();

            const double current_training_error_trial = optimization_algorithm_results.final_training_error;
            const double current_selection_error_trial = optimization_algorithm_results.final_selection_error;
            const Vector<double> current_parameters_trial = optimization_algorithm_results.final_parameters;

            if(current_selection_error_trial < optimum_selection_error_trial)
            {
                optimum_training_error_trial = current_training_error_trial;
                optimum_selection_error_trial = current_selection_error_trial;
                optimum_parameters_trial = current_parameters_trial;
            }

            if(display)
            {
                cout << "Trial number: " << i << endl;
                cout << "Training error: " << current_training_error_trial << endl;
                cout << "Selection error: " << current_selection_error_trial << endl;
                cout << "Stopping condition: " << optimization_algorithm_results.write_stopping_condition() << endl << endl;
            }
        }

        current_training_loss = optimum_training_error_trial;
        current_selection_error = optimum_selection_error_trial;
        current_parameters = optimum_parameters_trial;

        time(&current_time);

        elapsed_time = difftime(current_time, beginning_time);

        results->neurons_data.push_back(neurons_number);

        if(reserve_error_data)
        {
            results->training_loss_data.push_back(current_training_loss);
        }

        if(reserve_selection_error_data)
        {
            results->selection_error_data.push_back(current_selection_error);
        }

        if(iterations == 0
        ||(optimum_selection_error > current_selection_error
        && abs(optimum_selection_error - current_selection_error) > tolerance))
        {
            optimal_neurons_number = neurons_number;
            optimum_training_loss = current_training_loss;
            optimum_selection_error = current_selection_error;
            optimal_parameters = current_parameters;
        }
        else if(prev_selection_error < current_selection_error)
        {
            selection_failures++;
        }

        prev_selection_error = current_selection_error;
        iterations++;

        // Stopping criteria

        if(elapsed_time >= maximum_time)
        {
            end = true;

            if(display) cout << "Maximum time reached." << endl;

            results->stopping_condition = IncrementalNeurons::MaximumTime;
        }
        else if(current_selection_error <= selection_error_goal)
        {
            end = true;

            if(display) cout << "Selection error goal reached." << endl;

            results->stopping_condition = IncrementalNeurons::SelectionErrorGoal;
        }
        else if(iterations >= maximum_iterations_number)
        {
            end = true;

            if(display) cout << "Maximum number of iterations reached." << endl;

            results->stopping_condition = IncrementalNeurons::MaximumIterations;
        }
        else if(selection_failures >= maximum_selection_failures)
        {
            end = true;

            if(display) cout << "Maximum selection failures (" << selection_failures << ") reached." << endl;

            results->stopping_condition = IncrementalNeurons::MaximumSelectionFailures;
        }
        else if(neurons_number == maximum_order)
        {
            end = true;

            if(display) cout << "Algorithm finished." << endl;

            results->stopping_condition = IncrementalNeurons::AlgorithmFinished;
        }

        if(display)
        {
            cout << "Iteration: " << iterations << endl
                 << "Hidden neurons number: " << neurons_number << endl
                 << "Training loss: " << current_training_loss << endl
                 << "Selection error: " << current_selection_error << endl
                 << "Elapsed time: " << write_elapsed_time(elapsed_time) << endl << endl;
        }

        if(end) break;

        neurons_number++;
    }

    if(display)
    {
        cout << endl
             << "Optimal order: " << optimal_neurons_number << endl
             << "Optimum selection error: " << optimum_selection_error << endl
             << "Corresponding training loss: " << optimum_training_loss << endl;
    }

    // Save neural network

//    neural_network->set_order(optimal_neurons_number);
//    neural_network_pointer->set_layer_neurons_number(optimal_neurons_number);

    trainable_layers_pointers[trainable_layers_number-1]->set_inputs_number(optimal_neurons_number);
    trainable_layers_pointers[trainable_layers_number-2]->set_neurons_number(optimal_neurons_number);

    neural_network->set_parameters(optimal_parameters);

    // Save results

    if(reserve_minimal_parameters)
    {
        results->minimal_parameters = optimal_parameters;
    }

    results->optimal_neurons_number = optimal_neurons_number;
    results->final_selection_error = optimum_selection_error;
    results->final_training_loss = optimum_training_loss;
    results->iterations_number = iterations;
    results->elapsed_time = elapsed_time;

    return results;
}

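// Usage sketch (not part of the library): the lines below illustrate how this
// method is typically driven from user code, assuming `training_strategy` is a
// TrainingStrategy already bound to a neural network and a data set. The
// variable names and parameter values are illustrative only.
//
//     IncrementalNeurons incremental_neurons(&training_strategy);
//     incremental_neurons.set_step(1);
//     incremental_neurons.set_maximum_selection_failures(5);
//
//     IncrementalNeurons::IncrementalNeuronsResults* results =
//         incremental_neurons.perform_neurons_selection();
//
//     cout << "Optimal neurons number: " << results->optimal_neurons_number << endl;
//     cout << "Final selection error: " << results->final_selection_error << endl;
//
//     delete results;    // results are allocated with new by perform_neurons_selection()
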

/// Writes as a matrix of strings the most representative attributes.

Matrix<string> IncrementalNeurons::to_string_matrix() const
{
    ostringstream buffer;

    Vector<string> labels;
    Vector<string> values;

    // Minimum order

    labels.push_back("Minimum order");

    buffer.str("");
    buffer << minimum_order;

    values.push_back(buffer.str());

    // Maximum order

    labels.push_back("Maximum order");

    buffer.str("");
    buffer << maximum_order;

    values.push_back(buffer.str());

    // Step

    labels.push_back("Step");

    buffer.str("");
    buffer << step;

    values.push_back(buffer.str());

    // Trials number

    labels.push_back("Trials number");

    buffer.str("");
    buffer << trials_number;

    values.push_back(buffer.str());

    // Tolerance

    labels.push_back("Tolerance");

    buffer.str("");
    buffer << tolerance;

    values.push_back(buffer.str());

    // Selection loss goal

    labels.push_back("Selection loss goal");

    buffer.str("");
    buffer << selection_error_goal;

    values.push_back(buffer.str());

    // Maximum selection failures

    labels.push_back("Maximum selection failures");

    buffer.str("");
    buffer << maximum_selection_failures;

    values.push_back(buffer.str());

    // Maximum iterations number

    labels.push_back("Maximum iterations number");

    buffer.str("");
    buffer << maximum_iterations_number;

    values.push_back(buffer.str());

    // Maximum time

    labels.push_back("Maximum time");

    buffer.str("");
    buffer << maximum_time;

    values.push_back(buffer.str());

    // Plot training error history

    labels.push_back("Plot training error history");

    buffer.str("");

    if(reserve_error_data)
    {
        buffer << "true";
    }
    else
    {
        buffer << "false";
    }

    values.push_back(buffer.str());

    // Plot selection error history

    labels.push_back("Plot selection error history");

    buffer.str("");

    if(reserve_selection_error_data)
    {
        buffer << "true";
    }
    else
    {
        buffer << "false";
    }

    values.push_back(buffer.str());

    const size_t rows_number = labels.size();
    const size_t columns_number = 2;

    Matrix<string> string_matrix(rows_number, columns_number);

    string_matrix.set_column(0, labels, "name");
    string_matrix.set_column(1, values, "value");

    return string_matrix;
}
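
// For reference, the returned matrix has one row per attribute and two named
// columns ("name" and "value"). The values below are illustrative only; the
// actual ones are taken from the current members:
//
//     name                           value
//     Minimum order                  1
//     Maximum order                  10
//     Step                           1
//     Trials number                  3
//     Tolerance                      0.001
//     Selection loss goal            0
//     Maximum selection failures     5
//     Maximum iterations number      1000
//     Maximum time                   3600
//     Plot training error history    true
//     Plot selection error history   true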


/// Serializes the incremental order object into an XML document of the TinyXML library.

tinyxml2::XMLDocument* IncrementalNeurons::to_XML() const
{
    ostringstream buffer;

    tinyxml2::XMLDocument* document = new tinyxml2::XMLDocument;

    // Order Selection algorithm

    tinyxml2::XMLElement* root_element = document->NewElement("IncrementalNeurons");

    document->InsertFirstChild(root_element);

    tinyxml2::XMLElement* element = nullptr;
    tinyxml2::XMLText* text = nullptr;

    // Minimum order
    {
        element = document->NewElement("MinimumOrder");
        root_element->LinkEndChild(element);

        buffer.str("");
        buffer << minimum_order;

        text = document->NewText(buffer.str().c_str());
        element->LinkEndChild(text);
    }

    // Maximum order
    {
        element = document->NewElement("MaximumOrder");
        root_element->LinkEndChild(element);

        buffer.str("");
        buffer << maximum_order;

        text = document->NewText(buffer.str().c_str());
        element->LinkEndChild(text);
    }

    // Step
    {
        element = document->NewElement("Step");
        root_element->LinkEndChild(element);

        buffer.str("");
        buffer << step;

        text = document->NewText(buffer.str().c_str());
        element->LinkEndChild(text);
    }

    // Parameters assays number
    {
        element = document->NewElement("TrialsNumber");
        root_element->LinkEndChild(element);

        buffer.str("");
        buffer << trials_number;

        text = document->NewText(buffer.str().c_str());
        element->LinkEndChild(text);
    }

    // Reserve minimal parameters
//    {
//        element = document->NewElement("ReserveMinimalParameters");
//        root_element->LinkEndChild(element);

//        buffer.str("");
//        buffer << reserve_minimal_parameters;

//        text = document->NewText(buffer.str().c_str());
//        element->LinkEndChild(text);
//    }

    // Display
//    {
//        element = document->NewElement("Display");
//        root_element->LinkEndChild(element);

//        buffer.str("");
//        buffer << display;

//        text = document->NewText(buffer.str().c_str());
//        element->LinkEndChild(text);
//    }

    // Tolerance
    {
        element = document->NewElement("Tolerance");
        root_element->LinkEndChild(element);

        buffer.str("");
        buffer << tolerance;

        text = document->NewText(buffer.str().c_str());
        element->LinkEndChild(text);
    }

    // Selection error goal
    {
        element = document->NewElement("SelectionErrorGoal");
        root_element->LinkEndChild(element);

        buffer.str("");
        buffer << selection_error_goal;

        text = document->NewText(buffer.str().c_str());
        element->LinkEndChild(text);
    }

    // Maximum iterations
//    {
//        element = document->NewElement("MaximumEpochsNumber");
//        root_element->LinkEndChild(element);

//        buffer.str("");
//        buffer << maximum_iterations_number;

//        text = document->NewText(buffer.str().c_str());
//        element->LinkEndChild(text);
//    }

    // Maximum selection failures
    {
        element = document->NewElement("MaximumSelectionFailures");
        root_element->LinkEndChild(element);

        buffer.str("");
        buffer << maximum_selection_failures;

        text = document->NewText(buffer.str().c_str());
        element->LinkEndChild(text);
    }

    // Maximum time
    {
        element = document->NewElement("MaximumTime");
        root_element->LinkEndChild(element);

        buffer.str("");
        buffer << maximum_time;

        text = document->NewText(buffer.str().c_str());
        element->LinkEndChild(text);
    }

    // Reserve loss data
    {
        element = document->NewElement("ReserveTrainingErrorHistory");
        root_element->LinkEndChild(element);

        buffer.str("");
        buffer << reserve_error_data;

        text = document->NewText(buffer.str().c_str());
        element->LinkEndChild(text);
    }

    // Reserve selection error data
    {
        element = document->NewElement("ReserveSelectionErrorHistory");
        root_element->LinkEndChild(element);

        buffer.str("");
        buffer << reserve_selection_error_data;

        text = document->NewText(buffer.str().c_str());
        element->LinkEndChild(text);
    }

    return document;
}


/// Serializes the incremental order object into an XML document of the TinyXML library
/// without keeping the DOM tree in memory.
/// @param file_stream TinyXML printer used to write the document.

void IncrementalNeurons::write_XML(tinyxml2::XMLPrinter& file_stream) const
{
    ostringstream buffer;

    //file_stream.OpenElement("IncrementalNeurons");

    // Minimum order

    file_stream.OpenElement("MinimumOrder");

    buffer.str("");
    buffer << minimum_order;

    file_stream.PushText(buffer.str().c_str());

    file_stream.CloseElement();

    // Maximum order

    file_stream.OpenElement("MaximumOrder");

    buffer.str("");
    buffer << maximum_order;

    file_stream.PushText(buffer.str().c_str());

    file_stream.CloseElement();

    // Step

    file_stream.OpenElement("Step");

    buffer.str("");
    buffer << step;

    file_stream.PushText(buffer.str().c_str());

    file_stream.CloseElement();

    // Parameters assays number

    file_stream.OpenElement("TrialsNumber");

    buffer.str("");
    buffer << trials_number;

    file_stream.PushText(buffer.str().c_str());

    file_stream.CloseElement();

    // Tolerance

    file_stream.OpenElement("Tolerance");

    buffer.str("");
    buffer << tolerance;

    file_stream.PushText(buffer.str().c_str());

    file_stream.CloseElement();

    // Selection error goal

    file_stream.OpenElement("SelectionErrorGoal");

    buffer.str("");
    buffer << selection_error_goal;

    file_stream.PushText(buffer.str().c_str());

    file_stream.CloseElement();

    // Maximum selection failures

    file_stream.OpenElement("MaximumSelectionFailures");

    buffer.str("");
    buffer << maximum_selection_failures;

    file_stream.PushText(buffer.str().c_str());

    file_stream.CloseElement();

    // Maximum time

    file_stream.OpenElement("MaximumTime");

    buffer.str("");
    buffer << maximum_time;

    file_stream.PushText(buffer.str().c_str());

    file_stream.CloseElement();

    // Reserve loss data

    file_stream.OpenElement("ReserveTrainingErrorHistory");

    buffer.str("");
    buffer << reserve_error_data;

    file_stream.PushText(buffer.str().c_str());

    file_stream.CloseElement();

    // Reserve selection error data

    file_stream.OpenElement("ReserveSelectionErrorHistory");

    buffer.str("");
    buffer << reserve_selection_error_data;

    file_stream.PushText(buffer.str().c_str());

    file_stream.CloseElement();


    //file_stream.CloseElement();
}

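// For reference, the elements written above (and read back by from_XML()) are
// laid out as sketched below. Note that write_XML() itself does not open the
// <IncrementalNeurons> root element (the caller is expected to do so), while
// from_XML() requires it. The values are arbitrary examples:
//
//     <IncrementalNeurons>
//         <MinimumOrder>1</MinimumOrder>
//         <MaximumOrder>10</MaximumOrder>
//         <Step>1</Step>
//         <TrialsNumber>3</TrialsNumber>
//         <Tolerance>0.001</Tolerance>
//         <SelectionErrorGoal>0</SelectionErrorGoal>
//         <MaximumSelectionFailures>5</MaximumSelectionFailures>
//         <MaximumTime>3600</MaximumTime>
//         <ReserveTrainingErrorHistory>1</ReserveTrainingErrorHistory>
//         <ReserveSelectionErrorHistory>1</ReserveSelectionErrorHistory>
//     </IncrementalNeurons>
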

/// Deserializes a TinyXML document into the members of this incremental order object.
/// @param document TinyXML document containing the member data.

void IncrementalNeurons::from_XML(const tinyxml2::XMLDocument& document)
{
    const tinyxml2::XMLElement* root_element = document.FirstChildElement("IncrementalNeurons");

    if(!root_element)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: IncrementalNeurons class.\n"
               << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
               << "IncrementalNeurons element is nullptr.\n";

        throw logic_error(buffer.str());
    }

    // Minimum order
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("MinimumOrder");

        if(element)
        {
            const size_t new_minimum_order = static_cast<size_t>(atoi(element->GetText()));

            try
            {
                minimum_order = new_minimum_order;
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }

    // Maximum order
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("MaximumOrder");

        if(element)
        {
            const size_t new_maximum_order = static_cast<size_t>(atoi(element->GetText()));

            try
            {
                maximum_order = new_maximum_order;
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }

    // Step
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("Step");

        if(element)
        {
            const size_t new_step = static_cast<size_t>(atoi(element->GetText()));

            try
            {
                set_step(new_step);
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }

    // Parameters assays number
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("TrialsNumber");

        if(element)
        {
            const size_t new_trials_number = static_cast<size_t>(atoi(element->GetText()));

            try
            {
                set_trials_number(new_trials_number);
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }

    // Performance calculation method
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("LossCalculationMethod");

        if(element)
        {
            const string new_loss_calculation_method = element->GetText();

            try
            {
//                set_loss_calculation_method(new_loss_calculation_method);
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }

    // Reserve loss data
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("ReserveTrainingErrorHistory");

        if(element)
        {
            const string new_reserve_error_data = element->GetText();

            try
            {
                set_reserve_error_data(new_reserve_error_data != "0");
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }

    // Reserve selection error data
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("ReserveSelectionErrorHistory");

        if(element)
        {
            const string new_reserve_selection_error_data = element->GetText();

            try
            {
                set_reserve_selection_error_data(new_reserve_selection_error_data != "0");
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }

    // Reserve minimal parameters
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("ReserveMinimalParameters");

        if(element)
        {
            const string new_reserve_minimal_parameters = element->GetText();

            try
            {
                set_reserve_minimal_parameters(new_reserve_minimal_parameters != "0");
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }

    // Display
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("Display");

        if(element)
        {
            const string new_display = element->GetText();

            try
            {
                set_display(new_display != "0");
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }

    // Selection error goal
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("SelectionErrorGoal");

        if(element)
        {
            const double new_selection_error_goal = atof(element->GetText());

            try
            {
                set_selection_error_goal(new_selection_error_goal);
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }

    // Maximum iterations number
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("MaximumEpochsNumber");

        if(element)
        {
            const size_t new_maximum_iterations_number = static_cast<size_t>(atoi(element->GetText()));

            try
            {
                set_maximum_iterations_number(new_maximum_iterations_number);
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }

    // Maximum time
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("MaximumTime");

        if(element)
        {
            const double new_maximum_time = atof(element->GetText());

            try
            {
                set_maximum_time(new_maximum_time);
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }

    // Tolerance
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("Tolerance");

        if(element)
        {
            const double new_tolerance = atof(element->GetText());

            try
            {
                set_tolerance(new_tolerance);
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }

    // Maximum selection failures
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("MaximumSelectionFailures");

        if(element)
        {
            const size_t new_maximum_selection_failures = static_cast<size_t>(atoi(element->GetText()));

            try
            {
                set_maximum_selection_failures(new_maximum_selection_failures);
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }
}


/// Saves the incremental order members to an XML file.
/// @param file_name Name of the incremental order XML file.

void IncrementalNeurons::save(const string& file_name) const
{
    tinyxml2::XMLDocument* document = to_XML();

    document->SaveFile(file_name.c_str());

    delete document;
}


/// Loads the incremental order members from an XML file.
/// @param file_name Name of the incremental order XML file.

void IncrementalNeurons::load(const string& file_name)
{
    set_default();

    tinyxml2::XMLDocument document;

    if(document.LoadFile(file_name.c_str()))
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: IncrementalNeurons class.\n"
               << "void load(const string&) method.\n"
               << "Cannot load XML file " << file_name << ".\n";

        throw logic_error(buffer.str());
    }

    from_XML(document);
}


}

// OpenNN: Open Neural Networks Library.
// Copyright(C) 2005-2019 Artificial Intelligence Techniques, SL.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.

// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
Definition: incremental_neurons.cpp:681