genetic_algorithm.cpp
// OpenNN: Open Neural Networks Library
// www.opennn.net
//
// G E N E T I C   A L G O R I T H M   C L A S S
//
// Artificial Intelligence Techniques SL
// artelnics@artelnics.com

#include "genetic_algorithm.h"

namespace OpenNN
{

/// Default constructor.

GeneticAlgorithm::GeneticAlgorithm()
    : InputsSelection()
{
    set_default();
}


/// Training strategy constructor.
/// @param new_training_strategy_pointer Pointer to a training strategy object.

GeneticAlgorithm::GeneticAlgorithm(TrainingStrategy* new_training_strategy_pointer)
    : InputsSelection(new_training_strategy_pointer)
{
    set_default();
}


/// Destructor.

GeneticAlgorithm::~GeneticAlgorithm()
{
}


/// Returns the population matrix.

const Tensor<bool, 2>& GeneticAlgorithm::get_population() const
{
    return population;
}


/// Returns the fitness of the population.

const Tensor<type, 1>& GeneticAlgorithm::get_fitness() const
{
    return fitness;
}


const Tensor<bool, 1>& GeneticAlgorithm::get_selection() const
{
    return selection;
}


/// Returns the size of the population.

Index GeneticAlgorithm::get_individuals_number() const
{
    return population.dimension(0);
}


Index GeneticAlgorithm::get_genes_number() const
{
    return population.dimension(1);
}


/// Returns the rate used in the mutation.

const type& GeneticAlgorithm::get_mutation_rate() const
{
    return mutation_rate;
}


/// Returns the size of the elite in the selection.

const Index& GeneticAlgorithm::get_elitism_size() const
{
    return elitism_size;
}


/// Sets the members of the genetic algorithm object to their default values.

void GeneticAlgorithm::set_default()
{
    const Index genes_number = get_genes_number();
    Index individuals_number;

    mutation_rate = static_cast<type>(0.1);

    individuals_number = 10;

    // Population

    population.resize(individuals_number, genes_number);

    parameters.resize(individuals_number);
    for(Index i = 0; i < individuals_number; i++) parameters(i).resize(genes_number);

    training_errors.resize(individuals_number);
    selection_errors.resize(individuals_number);

    fitness.resize(individuals_number);
    fitness.setConstant(type(-1.0));

    selection.resize(individuals_number);

    // Training operators

    elitism_size = 2;
}
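
// The defaults above can be overridden through the setters defined below; an illustrative
// configuration sketch (the genetic_algorithm object name is a placeholder):
//
//     genetic_algorithm.set_individuals_number(20);                  // 20 individuals per generation
//     genetic_algorithm.set_elitism_size(4);                         // copy the 4 fittest unchanged
//     genetic_algorithm.set_mutation_rate(static_cast<type>(0.05));  // flip each gene with probability 0.05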


/// Sets a new population.
/// @param new_population New population matrix.

void GeneticAlgorithm::set_population(const Tensor<bool, 2>& new_population)
{
#ifdef OPENNN_DEBUG

    const Index individuals_number = get_individuals_number();
    const Index new_individuals_number = new_population.dimension(0);

    // Optimization algorithm

    ostringstream buffer;

    if(!training_strategy_pointer)
    {
        buffer << "OpenNN Exception: InputsSelection class.\n"
               << "void check() const method.\n"
               << "Pointer to training strategy is nullptr.\n";

        throw logic_error(buffer.str());
    }

    // Loss index

    const LossIndex* loss_index_pointer = training_strategy_pointer->get_loss_index_pointer();

    if(!loss_index_pointer)
    {
        buffer << "OpenNN Exception: InputsSelection class.\n"
               << "void check() const method.\n"
               << "Pointer to loss index is nullptr.\n";

        throw logic_error(buffer.str());
    }

    // Neural network

    const NeuralNetwork* neural_network_pointer = loss_index_pointer->get_neural_network_pointer();

    if(!neural_network_pointer)
    {
        buffer << "OpenNN Exception: InputsSelection class.\n"
               << "void check() const method.\n"
               << "Pointer to neural network is nullptr.\n";

        throw logic_error(buffer.str());
    }

    if(new_individuals_number != individuals_number)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: GeneticAlgorithm class.\n"
               << "void set_population(const Tensor<bool, 2>&) method.\n"
               << "Population rows (" << new_individuals_number
               << ") must be equal to population size (" << individuals_number << ").\n";

        throw logic_error(buffer.str());
    }

#endif

    population = new_population;
}


void GeneticAlgorithm::set_training_errors(const Tensor<type, 1>& new_training_errors)
{
    training_errors = new_training_errors;
}


void GeneticAlgorithm::set_selection_errors(const Tensor<type, 1>& new_selection_errors)
{
    selection_errors = new_selection_errors;
}


/// Sets a new fitness for the population.
/// @param new_fitness New fitness values.

void GeneticAlgorithm::set_fitness(const Tensor<type, 1>& new_fitness)
{
#ifdef OPENNN_DEBUG

    const Index individuals_number = get_individuals_number();

    if(new_fitness.size() != individuals_number)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: GeneticAlgorithm class.\n"
               << "void set_fitness(const Tensor<type, 1>&) method.\n"
               << "Fitness size (" << new_fitness.size()
               << ") must be equal to population size (" << individuals_number << ").\n";

        throw logic_error(buffer.str());
    }

    for(Index i = 0; i < individuals_number; i++)
    {
        if(new_fitness[i] < 0)
        {
            ostringstream buffer;

            buffer << "OpenNN Exception: GeneticAlgorithm class.\n"
                   << "void set_fitness(const Tensor<type, 1>&) method.\n"
                   << "Fitness must be equal or greater than 0.\n";

            throw logic_error(buffer.str());
        }
    }

#endif

    fitness = new_fitness;
}


/// Sets a new population size. It must be equal or greater than 4.
/// @param new_individuals_number Number of individuals in the population.

void GeneticAlgorithm::set_individuals_number(const Index& new_individuals_number)
{
#ifdef OPENNN_DEBUG

    if(new_individuals_number < 4)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: GeneticAlgorithm class.\n"
               << "void set_individuals_number(const Index&) method.\n"
               << "Population size must be equal or greater than 4.\n";

        throw logic_error(buffer.str());
    }

#endif

    const Index new_genes_number = get_genes_number();

    population.resize(new_individuals_number, new_genes_number);

    parameters.resize(new_individuals_number);

    training_errors.resize(new_individuals_number);
    selection_errors.resize(new_individuals_number);

    fitness.resize(new_individuals_number);
    fitness.setConstant(type(-1.0));

    selection.resize(new_individuals_number);

    if(elitism_size > new_individuals_number) elitism_size = new_individuals_number;
}


/// Sets a new rate used in the mutation.
/// It is a number between 0 and 1.
/// @param new_mutation_rate Rate used for the mutation.

void GeneticAlgorithm::set_mutation_rate(const type& new_mutation_rate)
{
#ifdef OPENNN_DEBUG

    if(new_mutation_rate < 0 || new_mutation_rate > 1)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: GeneticAlgorithm class.\n"
               << "void set_mutation_rate(const type&) method.\n"
               << "Mutation rate must be between 0 and 1.\n";

        throw logic_error(buffer.str());
    }

#endif

    mutation_rate = new_mutation_rate;
}


/// Sets the size of the elite: the number of fittest individuals copied unchanged into the next generation.
/// @param new_elitism_size Size of the elite.

void GeneticAlgorithm::set_elitism_size(const Index& new_elitism_size)
{
#ifdef OPENNN_DEBUG

    const Index individuals_number = get_individuals_number();

    if(new_elitism_size > individuals_number)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: GeneticAlgorithm class.\n"
               << "void set_elitism_size(const Index&) method.\n"
               << "Elitism size (" << new_elitism_size
               << ") must be lower than the population size (" << individuals_number << ").\n";

        throw logic_error(buffer.str());
    }

#endif

    elitism_size = new_elitism_size;
}


/// Initializes the population at random, ensuring that every individual keeps at least one active input.

void GeneticAlgorithm::initialize_population()
{
    const Index individuals_number = get_individuals_number();

#ifdef OPENNN_DEBUG

    if(individuals_number == 0)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: GeneticAlgorithm class.\n"
               << "void initialize_population() method.\n"
               << "Population size must be greater than 0.\n";

        throw logic_error(buffer.str());
    }

#endif

    const Index genes_number = get_genes_number();

    Tensor<bool, 1> individual(genes_number);

    for(Index i = 0; i < individuals_number; i++)
    {
        for(Index j = 0; j < genes_number; j++)
        {
            individual[j] = (rand() % 2 == 1);
        }

        // Prevent individuals with no inputs

        if(is_false(individual))
        {
            individual(static_cast<Index>(rand()) % genes_number) = true;
        }

        for(Index j = 0; j < genes_number; j++)
        {
            population(i, j) = individual(j);
        }
    }
}
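
// Illustrative example: with four candidate input columns, a population row such as
// {true, false, true, true} encodes an individual that is trained on the first, third and
// fourth original input columns and ignores the second one.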


/// Evaluates the population: for each individual, sets the corresponding inputs in the data set,
/// trains the neural network and stores the resulting training and selection errors.

void GeneticAlgorithm::evaluate_population()
{
#ifdef OPENNN_DEBUG

    check();

    if(population.size() == 0)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: GeneticAlgorithm class.\n"
               << "void evaluate_population() method.\n"
               << "Population size must be greater than 0.\n";

        throw logic_error(buffer.str());
    }

#endif

    // Training strategy

    TrainingResults training_results;

    // Loss index

    const LossIndex* loss_index_pointer = training_strategy_pointer->get_loss_index_pointer();

    // Data set

    DataSet* data_set_pointer = loss_index_pointer->get_data_set_pointer();

    Tensor<string, 1> inputs_names;

    // Neural network

    NeuralNetwork* neural_network_pointer = loss_index_pointer->get_neural_network_pointer();

    // Optimization algorithm

    Tensor<bool, 1> individual;

    // Model selection

    const Index individuals_number = get_individuals_number();
    const Index genes_number = get_genes_number();

    for(Index i = 0; i < individuals_number; i++)
    {
        individual = population.chip(i, 0);

        if(display) cout << "Individual " << i+1 << endl;

        const Tensor<Index, 0> input_columns_number = individual.cast<Index>().sum();

        Tensor<Index, 1> input_columns_indices(input_columns_number(0));

        Index index = 0;

        for(Index j = 0; j < genes_number; j++)
        {
            if(individual(j))
            {
                input_columns_indices(index) = original_input_columns_indices(j);
                index++;
            }
        }

        data_set_pointer->set_input_target_columns(input_columns_indices, original_target_columns_indices);

        inputs_names = data_set_pointer->get_input_variables_names();

        neural_network_pointer->set_inputs_number(data_set_pointer->get_input_variables_number());

        neural_network_pointer->set_inputs_names(inputs_names);

        neural_network_pointer->set_parameters_random();

        training_results = training_strategy_pointer->perform_training();

        // Store the results for this individual

        parameters(i) = neural_network_pointer->get_parameters();

        training_errors(i) = training_results.get_training_error();
        selection_errors(i) = training_results.get_selection_error();

        if(display)
        {
            cout << "Inputs: " << endl;

            const Tensor<string, 1> individual_inputs_names = neural_network_pointer->get_inputs_names();

            for(Index j = 0; j < individual_inputs_names.size(); j++) cout << "   " << individual_inputs_names(j) << endl;

            cout << "Training error: " << training_results.get_training_error() << endl;
            cout << "Selection error: " << training_results.get_selection_error() << endl;
        }
    }
}


/// Calculates the fitness of each individual from its selection error,
/// so that lower selection errors receive larger fitness values.

void GeneticAlgorithm::perform_fitness_assignment()
{
    const Index individuals_number = get_individuals_number();

    const Tensor<Index, 1> rank = calculate_rank_greater(selection_errors);

    for(Index i = 0; i < individuals_number; i++)
    {
        fitness(rank(i)) = type(i+1);
    }
}
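
// Worked example (illustrative): with selection_errors = {0.30, 0.10, 0.20, 0.40},
// calculate_rank_greater() orders the individuals by decreasing error, so the assigned
// fitness is {2, 4, 3, 1}; the individual with the lowest selection error receives the
// largest fitness and is therefore favoured by the roulette-wheel selection below.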


/// Selects, by roulette wheel over the fitness, the individuals that will take part in the crossover.

void GeneticAlgorithm::perform_selection()
{
#ifdef OPENNN_DEBUG

    if(population.size() == 0)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: GeneticAlgorithm class.\n"
               << "void perform_selection() method.\n"
               << "Population size must be greater than 0.\n";

        throw logic_error(buffer.str());
    }

    if(fitness.dimension(0) == 0)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: GeneticAlgorithm class.\n"
               << "void perform_selection() method.\n"
               << "No fitness found.\n";

        throw logic_error(buffer.str());
    }

#endif

    selection.setConstant(false);

    const Index individuals_number = get_individuals_number();

    const Index selected_individuals_number = static_cast<Index>((individuals_number - elitism_size)/2);

    const Tensor<type, 1> cumulative_fitness = fitness.cumsum(0);

    Tensor<Index, 1> fitness_rank = calculate_rank_greater(fitness);

    Index selection_count = 0;

    // Roulette wheel selection

    do
    {
        const type pointer = static_cast<type>(rand()/(RAND_MAX+1.0))*cumulative_fitness(individuals_number-1);

        if(pointer < cumulative_fitness(0) && !selection(0))
        {
            selection(0) = true;
            selection_count++;
            continue;
        }

        for(Index i = 1; i < individuals_number; i++)
        {
            if(cumulative_fitness(i-1) < pointer
            && pointer < cumulative_fitness(i)
            && !selection(i))
            {
                selection(i) = true;
                selection_count++;
                break;
            }
        }

    }while(selection_count < selected_individuals_number);

#ifdef OPENNN_DEBUG

    Index selection_assert = 0;
    for(Index i = 0; i < individuals_number; i++) if(selection(i)) selection_assert++;

    if(selection_assert != selected_individuals_number)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: GeneticAlgorithm class.\n"
               << "void perform_selection() method.\n"
               << "Number of selected individuals (" << selection_assert << ") must be " << selected_individuals_number << ".\n";

        throw logic_error(buffer.str());
    }

#endif

}
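
// Worked example (illustrative): with fitness = {2, 4, 3, 1} the cumulative fitness is
// {2, 6, 9, 10}. A pointer drawn uniformly in [0, 10) selects each individual with a
// probability proportional to its fitness slice, e.g. individual 1 owns (2, 6) and is
// picked with probability 0.4, so fitter individuals enter the crossover pool more often.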


/// Performs the crossover: the elite is copied unchanged and the remaining rows of the new
/// population are filled with offspring obtained by uniform crossover of pairs of selected parents.

void GeneticAlgorithm::perform_crossover()
{
    const Index individuals_number = get_individuals_number();
    const Index genes_number = get_genes_number();

#ifdef OPENNN_DEBUG

    const Index selected_individuals_number = (individuals_number - elitism_size)/2;

    Index count_selected_individuals = 0;
    for(Index i = 0; i < individuals_number; i++) if(selection(i)) count_selected_individuals++;

    if(selected_individuals_number != count_selected_individuals)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: GeneticAlgorithm class.\n"
               << "void perform_crossover() method.\n"
               << "Selected individuals number is wrong.\n";

        throw logic_error(buffer.str());
    }

#endif

    Index parent_1_index = 0;
    Index parent_2_index = 0;

    Tensor<bool, 1> parent_1(genes_number);
    Tensor<bool, 1> parent_2(genes_number);

    Tensor<bool, 1> offspring_1(genes_number);
    Tensor<bool, 1> offspring_2(genes_number);

    Index offspring_count = 0;

    Tensor<bool, 2> new_population(individuals_number, genes_number);

    Index new_individual_index = 0;

    Tensor<Index, 1> fitness_rank = calculate_rank_greater(fitness);

    // Elitism: copy the fittest individuals unchanged

    for(Index i = 0; i < elitism_size; i++)
    {
        new_individual_index = fitness_rank(i);

        for(Index j = 0; j < genes_number; j++)
        {
            new_population(offspring_count, j) = population(new_individual_index, j);
        }

        offspring_count++;
    }

    for(Index i = 0; i < individuals_number; i++)
    {
        if(offspring_count > individuals_number-1) break;

        if(!selection(i)) continue;

        parent_1_index = i;

        // Draw a second parent among the selected individuals, different from the first

        do{
            parent_2_index = static_cast<Index>(rand())%individuals_number;
        }while(!selection(parent_2_index) || parent_2_index == parent_1_index);

        parent_1 = population.chip(parent_1_index, 0);
        parent_2 = population.chip(parent_2_index, 0);

        // Uniform crossover: each gene is exchanged between the two offspring with probability 1/2

        for(Index j = 0; j < genes_number; j++)
        {
            if(rand()%2 == 0)
            {
                offspring_1(j) = parent_1[j];
                offspring_2(j) = parent_2[j];
            }
            else
            {
                offspring_1(j) = parent_2[j];
                offspring_2(j) = parent_1[j];
            }
        }

        // Prevent offspring with no inputs

        if(is_false(offspring_1))
            offspring_1(static_cast<Index>(rand())%genes_number) = true;

        if(is_false(offspring_2))
            offspring_2(static_cast<Index>(rand())%genes_number) = true;

        for(Index j = 0; j < genes_number; j++)
        {
            new_population(offspring_count, j) = offspring_1(j);

            // Write the second offspring only if there is room left in the new population

            if(offspring_count + 1 <= individuals_number-1) new_population(offspring_count+1, j) = offspring_2(j);
        }

        offspring_count += 2;
    }

    // In case of an odd number of free rows, fill the last row of the new population with a single offspring

    if(offspring_count == individuals_number-1)
    {
        for(Index i = 0; i < individuals_number; i++)
        {
            if(!selection(i)) continue;

            parent_1_index = i;

            do{
                parent_2_index = static_cast<Index>(rand())%individuals_number;
            }while(!selection(parent_2_index) || parent_2_index == parent_1_index);

            parent_1 = population.chip(parent_1_index, 0);
            parent_2 = population.chip(parent_2_index, 0);

            for(Index j = 0; j < genes_number; j++)
            {
                if(rand()%2 == 0)
                {
                    offspring_1(j) = parent_1[j];
                }
                else
                {
                    offspring_1(j) = parent_2[j];
                }
            }

            if(is_false(offspring_1))
                offspring_1(static_cast<Index>(rand())%genes_number) = true;

            for(Index j = 0; j < genes_number; j++)
            {
                new_population(offspring_count, j) = offspring_1(j);
            }
        }
    }

    population = new_population;
}
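
// Worked example (illustrative): with parents {1,0,1,0} and {0,1,0,1}, one possible coin
// sequence keeps genes 0-1 and swaps genes 2-3, giving offspring {1,0,0,1} and {0,1,1,0};
// any offspring that ends up with no active genes gets one random gene forced back to true.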


/// Performs the mutation of the individuals generated in the crossover:
/// each gene is flipped with probability mutation_rate.

void GeneticAlgorithm::perform_mutation()
{
    const Index individuals_number = get_individuals_number();

    const Index genes_number = get_genes_number();

    Tensor<bool, 1> individual(genes_number);

    for(Index i = 0; i < individuals_number; i++)
    {
        for(Index j = 0; j < genes_number; j++)
        {
            if(static_cast<type>(rand()/(RAND_MAX+1.0)) <= mutation_rate)
                population(i,j) = !population(i,j);
        }

        // Prevent individuals with no inputs

        individual = population.chip(i, 0);

        if(is_false(individual))
        {
            individual(static_cast<Index>(rand())%genes_number) = true;

            for(Index j = 0; j < genes_number; j++)
                population(i, j) = individual(j);
        }
    }
}
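
// Illustrative note: with the default mutation_rate of 0.1, each individual has on average
// 0.1 * genes_number genes flipped per generation, i.e. roughly one toggled input for a
// data set with ten candidate input columns.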


/// Selects the inputs with the best generalization properties using the genetic algorithm.

InputsSelectionResults GeneticAlgorithm::perform_inputs_selection()
{
#ifdef OPENNN_DEBUG

    check();

#endif

    if(population.size() == 0) set_individuals_number(10);

    if(display) cout << "Performing genetic inputs selection..." << endl << endl;

    InputsSelectionResults inputs_selection_results(maximum_epochs_number);

    // Training strategy

    training_strategy_pointer->set_display(false);

    // Loss index

    const LossIndex* loss_index_pointer = training_strategy_pointer->get_loss_index_pointer();

    // Data set

    DataSet* data_set_pointer = loss_index_pointer->get_data_set_pointer();

    original_input_columns_indices = data_set_pointer->get_input_columns_indices();
    original_target_columns_indices = data_set_pointer->get_target_columns_indices();

    // Neural network

    NeuralNetwork* neural_network_pointer = loss_index_pointer->get_neural_network_pointer();

    // Optimization algorithm

    Index optimal_individual_index;

    bool stop = false;

    time_t beginning_time, current_time;
    type elapsed_time = type(0);

    time(&beginning_time);

    initialize_population();

    for(Index epoch = 0; epoch < maximum_epochs_number; epoch++)
    {
        if(display) cout << "Generation: " << epoch + 1 << endl;

        evaluate_population();

        optimal_individual_index = minimal_index(selection_errors);

        inputs_selection_results.training_error_history(epoch) = training_errors(optimal_individual_index);
        inputs_selection_results.selection_error_history(epoch) = selection_errors(optimal_individual_index);

        if(selection_errors(optimal_individual_index) < inputs_selection_results.optimum_selection_error)
        {
            // Neural network

            inputs_selection_results.optimal_inputs = population.chip(optimal_individual_index, 0);

            data_set_pointer->set_input_columns(original_input_columns_indices, inputs_selection_results.optimal_inputs);

            inputs_selection_results.optimal_input_columns_names = data_set_pointer->get_input_columns_names();

            inputs_selection_results.optimal_parameters = parameters(optimal_individual_index);

            // Loss index

            inputs_selection_results.optimum_training_error = training_errors(optimal_individual_index);

            inputs_selection_results.optimum_selection_error = selection_errors(optimal_individual_index);
        }

        time(&current_time);

        elapsed_time = static_cast<type>(difftime(current_time, beginning_time));

        if(display)
        {
            cout << endl;

            cout << "Generation mean training error: " << training_errors.mean() << endl;
            cout << "Generation mean selection error: " << selection_errors.mean() << endl;

            cout << "Generation minimum training error: " << training_errors(optimal_individual_index) << endl;
            cout << "Generation minimum selection error: " << selection_errors(optimal_individual_index) << endl;

            cout << "Best ever training error: " << inputs_selection_results.optimum_training_error << endl;
            cout << "Best ever selection error: " << inputs_selection_results.optimum_selection_error << endl;

            cout << "Elapsed time: " << write_time(elapsed_time) << endl;
        }

        // Stopping criteria

        if(elapsed_time >= maximum_time)
        {
            stop = true;

            if(display) cout << "Epoch " << epoch << endl << "Maximum time reached: " << write_time(elapsed_time) << endl;

            inputs_selection_results.stopping_condition = InputsSelection::StoppingCondition::MaximumTime;
        }

        if(selection_errors(optimal_individual_index) <= selection_error_goal)
        {
            stop = true;

            if(display) cout << "Epoch " << epoch << endl << "Selection error goal reached: " << selection_errors(optimal_individual_index) << endl;

            inputs_selection_results.stopping_condition = InputsSelection::StoppingCondition::SelectionErrorGoal;
        }

        if(epoch >= maximum_epochs_number-1)
        {
            stop = true;

            if(display) cout << "Epoch " << epoch << endl << "Maximum number of epochs reached: " << epoch << endl;

            inputs_selection_results.stopping_condition = InputsSelection::StoppingCondition::MaximumEpochs;
        }

        if(stop)
        {
            inputs_selection_results.elapsed_time = write_time(elapsed_time);

            inputs_selection_results.resize_history(epoch+1);

            break;
        }

        perform_fitness_assignment();

        perform_selection();

        perform_crossover();

        perform_mutation();
    }

    // Set the data set to the optimal inputs

    data_set_pointer->set_input_columns(original_input_columns_indices, inputs_selection_results.optimal_inputs);

    const Tensor<Scaler, 1> input_variables_scalers = data_set_pointer->get_input_variables_scalers();

    const Tensor<Descriptives, 1> input_variables_descriptives = data_set_pointer->calculate_input_variables_descriptives();

    // Set the neural network to the optimal inputs and parameters

    neural_network_pointer->set_inputs_number(data_set_pointer->get_input_variables_number());

    neural_network_pointer->set_inputs_names(data_set_pointer->get_input_variables_names());

    if(neural_network_pointer->has_scaling_layer())
        neural_network_pointer->get_scaling_layer_pointer()->set(input_variables_descriptives, input_variables_scalers);

    neural_network_pointer->set_parameters(inputs_selection_results.optimal_parameters);

    if(display) inputs_selection_results.print();

    return inputs_selection_results;
}
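
// Minimal usage sketch (illustrative; assumes a DataSet and a NeuralNetwork have already
// been constructed and that the variable names below are placeholders):
//
//     TrainingStrategy training_strategy(&neural_network, &data_set);
//
//     GeneticAlgorithm genetic_algorithm(&training_strategy);
//     genetic_algorithm.set_individuals_number(10);
//     genetic_algorithm.set_maximum_epochs_number(100);
//
//     const InputsSelectionResults results = genetic_algorithm.perform_inputs_selection();
//
//     // The data set and the neural network are left configured with the optimal inputs,
//     // and results.optimal_input_columns_names lists the selected columns.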


/// Returns a matrix of strings with the labels and values of the main members of the genetic algorithm.

Tensor<string, 2> GeneticAlgorithm::to_string_matrix() const
{
    Tensor<string, 2> string_matrix(6, 2);

    const Index individuals_number = get_individuals_number();

    ostringstream buffer;

    Tensor<string, 1> labels(6);
    Tensor<string, 1> values(6);

    // Population size

    labels(0) = "Population size";
    values(0) = to_string(individuals_number);

    // Elitism size

    labels(1) = "Elitism size";
    values(1) = to_string(elitism_size);

    // Mutation rate

    labels(2) = "Mutation rate";
    values(2) = to_string(double(mutation_rate));

    // Selection error goal

    labels(3) = "Selection error goal";
    values(3) = to_string(double(selection_error_goal));

    // Maximum generations number

    labels(4) = "Maximum generations number";
    values(4) = to_string(maximum_epochs_number);

    // Maximum time

    labels(5) = "Maximum time";
    values(5) = to_string(double(maximum_time));

    string_matrix.chip(0, 1) = labels;
    string_matrix.chip(1, 1) = values;

    return string_matrix;
}


/// Serializes the genetic algorithm object into an XML document of the TinyXML library
/// without keeping the DOM tree in memory.
/// @param file_stream TinyXML file stream.

void GeneticAlgorithm::write_XML(tinyxml2::XMLPrinter& file_stream) const
{
    const Index individuals_number = get_individuals_number();

    ostringstream buffer;

    file_stream.OpenElement("GeneticAlgorithm");

    // Population size

    file_stream.OpenElement("PopulationSize");

    buffer.str("");
    buffer << individuals_number;

    file_stream.PushText(buffer.str().c_str());

    file_stream.CloseElement();

    // Elitism size

    file_stream.OpenElement("ElitismSize");

    buffer.str("");
    buffer << elitism_size;

    file_stream.PushText(buffer.str().c_str());

    file_stream.CloseElement();

    // Mutation rate

    file_stream.OpenElement("MutationRate");

    buffer.str("");
    buffer << mutation_rate;

    file_stream.PushText(buffer.str().c_str());

    file_stream.CloseElement();

    // Selection error goal

    file_stream.OpenElement("SelectionErrorGoal");

    buffer.str("");
    buffer << selection_error_goal;

    file_stream.PushText(buffer.str().c_str());

    file_stream.CloseElement();

    // Maximum generations number

    file_stream.OpenElement("MaximumGenerationsNumber");

    buffer.str("");
    buffer << maximum_epochs_number;

    file_stream.PushText(buffer.str().c_str());

    file_stream.CloseElement();

    // Maximum time

    file_stream.OpenElement("MaximumTime");

    buffer.str("");
    buffer << maximum_time;

    file_stream.PushText(buffer.str().c_str());

    file_stream.CloseElement();

    file_stream.CloseElement();
}
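
// The resulting document has the following shape (element values are illustrative):
//
//     <GeneticAlgorithm>
//         <PopulationSize>10</PopulationSize>
//         <ElitismSize>2</ElitismSize>
//         <MutationRate>0.1</MutationRate>
//         <SelectionErrorGoal>0</SelectionErrorGoal>
//         <MaximumGenerationsNumber>100</MaximumGenerationsNumber>
//         <MaximumTime>3600</MaximumTime>
//     </GeneticAlgorithm>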


/// Deserializes a TinyXML document into this genetic algorithm object.
/// @param document TinyXML document containing the member data.

void GeneticAlgorithm::from_XML(const tinyxml2::XMLDocument& document)
{
    const tinyxml2::XMLElement* root_element = document.FirstChildElement("GeneticAlgorithm");

    if(!root_element)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: GeneticAlgorithm class.\n"
               << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
               << "GeneticAlgorithm element is nullptr.\n";

        throw logic_error(buffer.str());
    }

    // Population size
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("PopulationSize");

        if(element)
        {
            const Index new_population_size = static_cast<Index>(atoi(element->GetText()));

            try
            {
                set_individuals_number(new_population_size);
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }

    // Mutation rate
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("MutationRate");

        if(element)
        {
            const type new_mutation_rate = static_cast<type>(atof(element->GetText()));

            try
            {
                set_mutation_rate(new_mutation_rate);
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }

    // Elitism size
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("ElitismSize");

        if(element)
        {
            const Index new_elitism_size = static_cast<Index>(atoi(element->GetText()));

            try
            {
                set_elitism_size(new_elitism_size);
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }

    // Display
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("Display");

        if(element)
        {
            const string new_display = element->GetText();

            try
            {
                set_display(new_display != "0");
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }

    // Selection error goal
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("SelectionErrorGoal");

        if(element)
        {
            const type new_selection_error_goal = static_cast<type>(atof(element->GetText()));

            try
            {
                set_selection_error_goal(new_selection_error_goal);
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }

    // Maximum generations number
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("MaximumGenerationsNumber");

        if(element)
        {
            const Index new_maximum_epochs_number = static_cast<Index>(atoi(element->GetText()));

            try
            {
                set_maximum_epochs_number(new_maximum_epochs_number);
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }

    // Maximum correlation
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("MaximumCorrelation");

        if(element)
        {
            const type new_maximum_correlation = static_cast<type>(atof(element->GetText()));

            try
            {
                set_maximum_correlation(new_maximum_correlation);
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }

    // Minimum correlation
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("MinimumCorrelation");

        if(element)
        {
            const type new_minimum_correlation = static_cast<type>(atof(element->GetText()));

            try
            {
                set_minimum_correlation(new_minimum_correlation);
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }

    // Maximum time
    {
        const tinyxml2::XMLElement* element = root_element->FirstChildElement("MaximumTime");

        if(element)
        {
            const type new_maximum_time = type(atoi(element->GetText()));

            try
            {
                set_maximum_time(new_maximum_time);
            }
            catch(const logic_error& e)
            {
                cerr << e.what() << endl;
            }
        }
    }
}


void GeneticAlgorithm::print() const
{
    cout << "Genetic algorithm" << endl;
    cout << "Individuals number: " << get_individuals_number() << endl;
    cout << "Genes number: " << get_genes_number() << endl;
}


/// Saves the genetic algorithm members to an XML file.
/// @param file_name Name of the XML file.

void GeneticAlgorithm::save(const string& file_name) const
{
    FILE * file = fopen(file_name.c_str(), "w");

    tinyxml2::XMLPrinter printer(file);

    write_XML(printer);

    fclose(file);
}


/// Loads the genetic algorithm members from an XML file.
/// @param file_name Name of the XML file.

void GeneticAlgorithm::load(const string& file_name)
{
    set_default();

    tinyxml2::XMLDocument document;

    if(document.LoadFile(file_name.c_str()))
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: GeneticAlgorithm class.\n"
               << "void load(const string&) method.\n"
               << "Cannot load XML file " << file_name << ".\n";

        throw logic_error(buffer.str());
    }

    from_XML(document);
}

}

// OpenNN: Open Neural Networks Library.
// Copyright(C) 2005-2021 Artificial Intelligence Techniques, SL.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.

// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA