scaling_layer.cpp
1// OpenNN: Open Neural Networks Library
2// www.opennn.net
3//
4// S C A L I N G L A Y E R C L A S S
5//
6// Artificial Intelligence Techniques SL
7// artelnics@artelnics.com
8
9#include "scaling_layer.h"
10
11namespace OpenNN
12{
13
16
18{
19 set();
20}
21
22
27
28ScalingLayer::ScalingLayer(const Index& new_neurons_number) : Layer()
29{
30 set(new_neurons_number);
31}
32
33
34ScalingLayer::ScalingLayer(const Tensor<Index, 1>& new_inputs_dimensions) : Layer()
35{
36 set(new_inputs_dimensions);
37}
38
39
44
45ScalingLayer::ScalingLayer(const Tensor<Descriptives, 1>& new_descriptives) : Layer()
46{
47 set(new_descriptives);
48}
49
50
52
54{
55}
56
57
58Tensor<Index, 1> ScalingLayer::get_outputs_dimensions() const
59{
60 return input_variables_dimensions;
61}
62
63
65{
66 return descriptives.size();
67}
68
69
70Index ScalingLayer::get_neurons_number() const
71{
72 return descriptives.size();
73}
74
75
78
79Tensor<Descriptives, 1> ScalingLayer::get_descriptives() const
80{
81 return descriptives;
82}
83
84
87
89{
90 return descriptives(index);
91}
92
93
95
96Tensor<type, 1> ScalingLayer::get_minimums() const
97{
98 const Index neurons_number = get_neurons_number();
99
100 Tensor<type, 1> minimums(neurons_number);
101
102 for(Index i = 0; i < neurons_number; i++)
103 {
104 minimums[i] = descriptives[i].minimum;
105 }
106
107 return minimums;
108}
109
110
112
113Tensor<type, 1> ScalingLayer::get_maximums() const
114{
115 const Index neurons_number = get_neurons_number();
116
117 Tensor<type, 1> maximums(neurons_number);
118
119 for(Index i = 0; i < neurons_number; i++)
120 {
121 maximums[i] = descriptives[i].maximum;
122 }
123
124 return maximums;
125}
126
127
129
130Tensor<type, 1> ScalingLayer::get_means() const
131{
132 const Index neurons_number = get_neurons_number();
133
134 Tensor<type, 1> means(neurons_number);
135
136 for(Index i = 0; i < neurons_number; i++)
137 {
138 means[i] = descriptives[i].mean;
139 }
140
141 return means;
142}
143
144
146
148{
149 const Index neurons_number = get_neurons_number();
150
151 Tensor<type, 1> standard_deviations(neurons_number);
152
153 for(Index i = 0; i < neurons_number; i++)
154 {
155 standard_deviations[i] = descriptives[i].standard_deviation;
156 }
157
158 return standard_deviations;
159}
160
161
163
164const Tensor<Scaler, 1> ScalingLayer::get_scaling_methods() const
165{
166 return scalers;
167}
168
169
171
172Tensor<string, 1> ScalingLayer::write_scalers() const
173{
174 const Index neurons_number = get_neurons_number();
175
176 Tensor<string, 1> scaling_methods_strings(neurons_number);
177
178 for(Index i = 0; i < neurons_number; i++)
179 {
180 if(scalers[i] == Scaler::NoScaling)
181 {
182 scaling_methods_strings[i] = "NoScaling";
183 }
184 else if(scalers[i] == Scaler::MinimumMaximum)
185 {
186 scaling_methods_strings[i] = "MinimumMaximum";
187 }
188 else if(scalers[i] == Scaler::MeanStandardDeviation)
189 {
190 scaling_methods_strings[i] = "MeanStandardDeviation";
191 }
192 else if(scalers[i] == Scaler::StandardDeviation)
193 {
194 scaling_methods_strings[i] = "StandardDeviation";
195 }
196 else if(scalers[i] == Scaler::Logarithm)
197 {
198 scaling_methods_strings[i] = "Logarithm";
199 }
200 else
201 {
202 ostringstream buffer;
203
204 buffer << "OpenNN Exception: ScalingLayer class.\n"
205 << "Tensor<string, 1> write_scalers() const method.\n"
206 << "Unknown " << i << " scaling method.\n";
207
208 throw logic_error(buffer.str());
209 }
210 }
211
212 return scaling_methods_strings;
213}
214
215
218
219Tensor<string, 1> ScalingLayer::write_scalers_text() const
220{
221 const Index neurons_number = get_neurons_number();
222
223#ifdef OPENNN_DEBUG
224
225 if(neurons_number == 0)
226 {
227 ostringstream buffer;
228
229 buffer << "OpenNN Exception: ScalingLayer class.\n"
230 << "Tensor<string, 1> write_scalers() const method.\n"
231 << "Neurons number must be greater than 0.\n";
232
233 throw logic_error(buffer.str());
234 }
235
236#endif
237
238 Tensor<string, 1> scaling_methods_strings(neurons_number);
239
240 for(Index i = 0; i < neurons_number; i++)
241 {
242 if(scalers[i] == Scaler::NoScaling)
243 {
244 scaling_methods_strings[i] = "no scaling";
245 }
246 else if(scalers[i] == Scaler::MeanStandardDeviation)
247 {
248 scaling_methods_strings[i] = "mean and standard deviation";
249 }
250 else if(scalers[i] == Scaler::StandardDeviation)
251 {
252 scaling_methods_strings[i] = "standard deviation";
253 }
254 else if(scalers[i] == Scaler::MinimumMaximum)
255 {
256 scaling_methods_strings[i] = "minimum and maximum";
257 }
258 else if(scalers[i] == Scaler::Logarithm)
259 {
260 scaling_methods_strings[i] = "Logarithm";
261 }
262 else
263 {
264 ostringstream buffer;
265
266 buffer << "OpenNN Exception: ScalingLayer class.\n"
267 << "Tensor<string, 1> write_scalers_text() const method.\n"
268 << "Unknown " << i << " scaling method.\n";
269
270 throw logic_error(buffer.str());
271 }
272 }
273
274 return scaling_methods_strings;
275}
276
277// const bool& get_display() const method
278
281
282const bool& ScalingLayer::get_display() const
283{
284 return display;
285}
286
287
289
291{
292 descriptives.resize(0);
293
294 scalers.resize(0);
295
296 set_default();
297}
298
299
302
303void ScalingLayer::set(const Index& new_inputs_number)
304{
305 descriptives.resize(new_inputs_number);
306
307 scalers.resize(new_inputs_number);
308
309 scalers.setConstant(Scaler::MeanStandardDeviation);
310
311 set_default();
312}
313
314
315void ScalingLayer::set(const Tensor<Index, 1>& new_inputs_dimensions)
316{
317 const Tensor<Index,0> dimension_product = new_inputs_dimensions.prod();
318
319 descriptives.resize(dimension_product(0));
320
321 scalers.resize(dimension_product(0));
322 scalers.setConstant(Scaler::MeanStandardDeviation);
323
324 input_variables_dimensions.resize(new_inputs_dimensions.size());
325
326 input_variables_dimensions = new_inputs_dimensions;
327
328 set_default();
329}
330
331
336
337void ScalingLayer::set(const Tensor<Descriptives, 1>& new_descriptives)
338{
339 descriptives = new_descriptives;
340
341 scalers.resize(new_descriptives.size());
342
343 scalers.setConstant(Scaler::MeanStandardDeviation);
344
345 set_default();
346}
347
348
349void ScalingLayer::set(const Tensor<Descriptives, 1>& new_descriptives, const Tensor<Scaler, 1>& new_scalers)
350{
351 descriptives = new_descriptives;
352
353 scalers = new_scalers;
354}
355
356
359
360void ScalingLayer::set(const tinyxml2::XMLDocument& new_scaling_layer_document)
361{
362 set_default();
363
364 from_XML(new_scaling_layer_document);
365}
366
367
368void ScalingLayer::set_inputs_number(const Index& new_inputs_number)
369{
370 descriptives.resize(new_inputs_number);
371
372 scalers.resize(new_inputs_number);
373
374 scalers.setConstant(Scaler::MeanStandardDeviation);
375}
376
377
378void ScalingLayer::set_neurons_number(const Index& new_neurons_number)
379{
380 descriptives.resize(new_neurons_number);
381
382 scalers.resize(new_neurons_number);
383
384 scalers.setConstant(Scaler::MeanStandardDeviation);
385}
386
387
397
399{
400 layer_name = "scaling_layer";
401
402 set_scalers(Scaler::MeanStandardDeviation);
403
404 set_min_max_range(type(-1), type(1));
405
406 set_display(true);
407
408 layer_type = Type::Scaling;
409}
410
411
414
415void ScalingLayer::set_min_max_range(const type& min, const type& max)
416{
417 min_range = min;
418 max_range = max;
419}
420
421
425
426void ScalingLayer::set_descriptives(const Tensor<Descriptives, 1>& new_descriptives)
427{
428#ifdef OPENNN_DEBUG
429
430 const Index new_descriptives_size = new_descriptives.size();
431
432 const Index neurons_number = get_neurons_number();
433
434 if(new_descriptives_size != neurons_number)
435 {
436 ostringstream buffer;
437
438 buffer << "OpenNN Exception: ScalingLayer class.\n"
439 << "void set_descriptives(const Tensor<Descriptives, 1>&) method.\n"
440 << "Size of descriptives (" << new_descriptives_size << ") is not equal to number of scaling neurons (" << neurons_number << ").\n";
441
442 throw logic_error(buffer.str());
443 }
444
445#endif
446
447 descriptives = new_descriptives;
448}
449
450
454
455void ScalingLayer::set_item_descriptives(const Index& i, const Descriptives& item_descriptives)
456{
457 descriptives(i) = item_descriptives;
458}
459
460
464
465void ScalingLayer::set_minimum(const Index& i, const type& new_minimum)
466{
467 descriptives(i).set_minimum(new_minimum);
468}
469
470
474
475void ScalingLayer::set_maximum(const Index& i, const type& new_maximum)
476{
477 descriptives(i).set_maximum(new_maximum);
478}
479
480
484
485void ScalingLayer::set_mean(const Index& i, const type& new_mean)
486{
487 descriptives(i).set_mean(new_mean);
488}
489
490
494
495void ScalingLayer::set_standard_deviation(const Index& i, const type& new_standard_deviation)
496{
497 descriptives(i).set_standard_deviation(new_standard_deviation);
498}
499
500
503
504void ScalingLayer::set_scalers(const Tensor<Scaler, 1>& new_scaling_methods)
505{
506#ifdef OPENNN_DEBUG
507
508 const Index neurons_number = get_neurons_number();
509
510 if(neurons_number == 0)
511 {
512 ostringstream buffer;
513
514 buffer << "OpenNN Exception: ScalingLayer class.\n"
515 << "void set_scalers(const Tensor<Scaler, 1>&) method.\n"
516 << "Neurons number (" << neurons_number << ") must be greater than 0.\n";
517
518 throw logic_error(buffer.str());
519 }
520
521#endif
522
523 scalers = new_scaling_methods;
524}
525
526
530
531void ScalingLayer::set_scalers(const Tensor<string, 1>& new_scaling_methods_string)
532{
533 const Index neurons_number = get_neurons_number();
534
535#ifdef OPENNN_DEBUG
536
537 if(neurons_number == 0)
538 {
539 ostringstream buffer;
540
541 buffer << "OpenNN Exception: ScalingLayer class.\n"
542 << "void set_scalers(const Tensor<string, 1>&) method.\n"
543 << "Neurons number (" << neurons_number << ") must be greater than 0.\n";
544
545 throw logic_error(buffer.str());
546 }
547
548#endif
549
550 Tensor<Scaler, 1> new_scaling_methods(neurons_number);
551
552 for(Index i = 0; i < neurons_number; i++)
553 {
554 if(new_scaling_methods_string(i) == "NoScaling")
555 {
556 new_scaling_methods(i) = Scaler::NoScaling;
557 }
558 else if(new_scaling_methods_string(i) == "MinimumMaximum")
559 {
560 new_scaling_methods(i) = Scaler::MinimumMaximum;
561 }
562 else if(new_scaling_methods_string(i) == "MeanStandardDeviation")
563 {
564 new_scaling_methods(i) = Scaler::MeanStandardDeviation;
565 }
566 else if(new_scaling_methods_string(i) == "StandardDeviation")
567 {
568 new_scaling_methods(i) = Scaler::StandardDeviation;
569 }
570 else if(new_scaling_methods_string(i) == "Logarithm")
571 {
572 new_scaling_methods(i) = Scaler::Logarithm;
573 }
574 else
575 {
576 ostringstream buffer;
577
578 buffer << "OpenNN Exception: ScalingLayer class.\n"
579 << "void set_scalers(const Tensor<string, 1>&) method.\n"
580 << "Unknown scaling method: " << new_scaling_methods_string[i] << ".\n";
581
582 throw logic_error(buffer.str());
583 }
584 }
585
586 set_scalers(new_scaling_methods);
587}
588
589
593
594void ScalingLayer::set_scalers(const string& new_scaling_methods_string)
595{
596 const Index neurons_number = get_neurons_number();
597
598#ifdef OPENNN_DEBUG
599
600 if(neurons_number == 0)
601 {
602 ostringstream buffer;
603
604 buffer << "OpenNN Exception: ScalingLayer class.\n"
605 << "void set_scalers(const Tensor<string, 1>&) method.\n"
606 << "Neurons number (" << neurons_number << ")must be greater than 0.\n";
607
608 throw logic_error(buffer.str());
609 }
610
611#endif
612
613 Tensor<Scaler, 1> new_scaling_methods(neurons_number);
614
615 for(Index i = 0; i < neurons_number; i++)
616 {
617 if(new_scaling_methods_string == "NoScaling")
618 {
619 new_scaling_methods(i) = Scaler::NoScaling;
620 }
621 else if(new_scaling_methods_string == "MeanStandardDeviation")
622 {
623 new_scaling_methods(i) = Scaler::MeanStandardDeviation;
624 }
625 else if(new_scaling_methods_string == "MinimumMaximum")
626 {
627 new_scaling_methods(i) = Scaler::MinimumMaximum;
628 }
629 else if(new_scaling_methods_string == "StandardDeviation")
630 {
631 new_scaling_methods(i) = Scaler::StandardDeviation;
632 }
633 else if(new_scaling_methods_string == "Logarithm")
634 {
635 new_scaling_methods(i) = Scaler::Logarithm;
636 }
637 else
638 {
639 ostringstream buffer;
640
641 buffer << "OpenNN Exception: ScalingLayer class.\n"
642 << "void set_scalers(const Tensor<string, 1>&) method.\n"
643 << "Unknown scaling method: " << new_scaling_methods_string[i] << ".\n";
644
645 throw logic_error(buffer.str());
646 }
647 }
648
649 set_scalers(new_scaling_methods);
650}
651
652
655
656void ScalingLayer::set_scalers(const Scaler& new_scaling_method)
657{
658 const Index neurons_number = get_neurons_number();
659
660 for(Index i = 0; i < neurons_number; i++)
661 {
662 scalers(i) = new_scaling_method;
663 }
664}
665
666
671
672void ScalingLayer::set_display(const bool& new_display)
673{
674 display = new_display;
675}
676
677
679
681{
682 const Index inputs_number = get_neurons_number();
683
684 if(inputs_number == 0)
685 {
686 return true;
687 }
688 else
689 {
690 return false;
691 }
692}
693
694
700
701void ScalingLayer::check_range(const Tensor<type, 1>& inputs) const
702{
703 const Index inputs_number = get_neurons_number();
704
705#ifdef OPENNN_DEBUG
706
707 const Index size = inputs.size();
708
709 if(size != inputs_number)
710 {
711 ostringstream buffer;
712
713 buffer << "OpenNN Exception: ScalingLayer class.\n"
714 << "void check_range(const Tensor<type, 1>&) const method.\n"
715 << "Size of inputs must be equal to number of inputs.\n";
716
717 throw logic_error(buffer.str());
718 }
719
720#endif
721
722 // Check inputs
723
724 if(display)
725 {
726 for(Index i = 0; i < inputs_number; i++)
727 {
728 if(inputs(i) < descriptives(i).minimum)
729 {
730 cout << "OpenNN Warning: ScalingLayer class.\n"
731 << "void check_range(const Tensor<type, 1>&) const method.\n"
732 << "Input value " << i << " is less than corresponding minimum.\n";
733 }
734
735 if(inputs(i) > descriptives(i).maximum)
736 {
737 cout << "OpenNN Warning: ScalingLayer class.\n"
738 << "void check_range(const Tensor<type, 1>&) const method.\n"
739 << "Input value " << i << " is greater than corresponding maximum.\n";
740 }
741 }
742 }
743}
744
745
748
749Tensor<type, 2> ScalingLayer::calculate_outputs(const Tensor<type, 2>& inputs)
750{
751 Tensor<type, 2> outputs;
752
753 const Index neurons_number = get_neurons_number();
754
755#ifdef OPENNN_DEBUG
756
757 ostringstream buffer;
758
759 const Index columns_number = inputs.dimension(1);
760
761 if(columns_number != neurons_number)
762 {
763 buffer << "OpenNN Exception: ScalingLayer class.\n"
764 << "Tensor<type, 2> calculate_outputs(const Tensor<type, 2>&) const method.\n"
765 << "Size of inputs (" << columns_number << ") must be equal to number of scaling neurons (" << neurons_number << ").\n";
766
767 throw logic_error(buffer.str());
768 }
769
770#endif
771
772 const Index points_number = inputs.dimension(0);
773
774 outputs.resize(points_number, neurons_number);
775
776 for(Index i = 0; i < points_number; i++)
777 {
778 for(Index j = 0; j < neurons_number; j++)
779 {
780 if(abs(descriptives(j).minimum - descriptives(j).maximum) < type(NUMERIC_LIMITS_MIN))
781 {
782 if(display)
783 {
784 cout << "OpenNN Warning: ScalingLayer class.\n"
785 << "Tensor<type, 2> calculate_outputs(const Tensor<type, 2>&) const method.\n"
786 << "Standard deviation of variable " << i << " is zero.\n"
787 << "Those variables won't be scaled.\n";
788 }
789
790 outputs(j) = inputs(j);
791 }
792 else
793 {
794 if(scalers(j) == Scaler::NoScaling)
795 {
796 outputs(i,j) = inputs(i,j);
797 }
798 else if(scalers(j) == Scaler::MinimumMaximum)
799 {
800 const type slope =
801 (max_range-min_range)/(descriptives(j).maximum-descriptives(j).minimum);
802
803 const type intercept =
804 (min_range*descriptives(j).maximum-max_range*descriptives(j).minimum)/(descriptives(j).maximum-descriptives(j).minimum);
805
806 outputs(i,j) = inputs(i,j)*slope + intercept;
807 }
808 else if(scalers(j) == Scaler::MeanStandardDeviation)
809 {
810 const type slope = static_cast<type>(1)/descriptives(j).standard_deviation;
811
812 const type intercept = -descriptives(j).mean/descriptives(j).standard_deviation;
813
814 outputs(i,j) = inputs(i,j)*slope + intercept;
815
816 }
817 else if(scalers(j) == Scaler::StandardDeviation)
818 {
819 outputs(i,j) = inputs(i,j)/descriptives(j).standard_deviation;
820 }
821 else if(scalers(j) == Scaler::Logarithm)
822 {
823 outputs(i,j) = log(inputs(i,j));
824 }
825 else
826 {
827 ostringstream buffer;
828
829 buffer << "OpenNN Exception: ScalingLayer class\n"
830 << "Tensor<type, 2> calculate_outputs(const Tensor<type, 2>&) const method.\n"
831 << "Unknown scaling method.\n";
832
833 throw logic_error(buffer.str());
834 }
835 }
836 }
837 }
838
839 return outputs;
840}
841
842
843Tensor<type, 4> ScalingLayer::calculate_outputs(const Tensor<type, 4>& inputs)
844{
845 Tensor<type, 4> outputs;
846
847 const Index neurons_number = get_neurons_number();
848
849#ifdef OPENNN_DEBUG
850
851 ostringstream buffer;
852
853 const Index columns_number = inputs.dimension(1) * inputs.dimension(2) * inputs.dimension(3);
854
855 if(columns_number != neurons_number)
856 {
857 buffer << "OpenNN Exception: ScalingLayer class.\n"
858 << "Tensor<type, 2> calculate_outputs(const Tensor<type, 2>&) const method.\n"
859 << "Size of inputs (" << columns_number << ") must be equal to number of scaling neurons (" << neurons_number << ").\n";
860
861 throw logic_error(buffer.str());
862 }
863
864#endif
865
866 const Index points_number = inputs.dimension(0);
867
868 for(Index i = 0; i < points_number; i++)
869 {
870 for(Index j = 0; j < neurons_number; j++)
871 {
872 const Index channel_index = j%inputs.dimension(1);
873 const Index row_index = (j/(inputs.dimension(1)))%inputs.dimension(2);
874 const Index column_index = (j/(inputs.dimension(1) * inputs.dimension(2)))%inputs.dimension(3);
875
876 if(abs(descriptives(j).minimum - descriptives(j).maximum) < type(NUMERIC_LIMITS_MIN))
877 {
878 if(display)
879 {
880 cout << "OpenNN Warning: ScalingLayer class.\n"
881 << "Tensor<type, 2> calculate_mean_standard_deviation_outputs(const Tensor<type, 2>&) const method.\n"
882 << "Standard deviation of variable " << i << " is zero.\n"
883 << "Those variables won't be scaled.\n";
884 }
885
886 outputs(j) = inputs(j);
887 }
888 else
889 {
890 if(scalers(j) == Scaler::NoScaling)
891 {
892 outputs(i, channel_index, row_index, column_index) = inputs(i, channel_index, row_index, column_index);
893 }
894 else if(scalers(j) == Scaler::MinimumMaximum)
895 {
896 outputs(i, channel_index, row_index, column_index) = static_cast<type>(2)*(inputs(i, channel_index, row_index, column_index) - descriptives(j).minimum)/(descriptives(j).maximum-descriptives(j).minimum) - static_cast<type>(1);
897 }
898 else if(scalers(j) == Scaler::MeanStandardDeviation)
899 {
900 outputs(i, channel_index, row_index, column_index) = (inputs(i, channel_index, row_index, column_index) - descriptives(j).mean)/descriptives(j).standard_deviation;
901 }
902 else if(scalers(j) == Scaler::StandardDeviation)
903 {
904 outputs(i, channel_index, row_index, column_index) = inputs(i, channel_index, row_index, column_index)/descriptives(j).standard_deviation;
905 }
906 else if(scalers(j) == Scaler::Logarithm)
907 {
908 outputs(i, channel_index, row_index, column_index) = log(inputs(i, channel_index, row_index, column_index));
909 }
910 else
911 {
912 ostringstream buffer;
913
914 buffer << "OpenNN Exception: ScalingLayer class\n"
915 << "Tensor<type, 2> calculate_outputs(const Tensor<type, 2>&) const method.\n"
916 << "Unknown scaling method.\n";
917
918 throw logic_error(buffer.str());
919 }
920 }
921 }
922 }
923
924 return outputs;
925}
926
927
931
932string ScalingLayer::write_no_scaling_expression(const Tensor<string, 1>& inputs_names, const Tensor<string, 1>& outputs_names) const
933{
934 const Index inputs_number = get_neurons_number();
935
936 ostringstream buffer;
937
938 buffer.precision(10);
939
940 for(Index i = 0; i < inputs_number; i++)
941 {
942 buffer << outputs_names(i) << " = " << inputs_names(i) << ";\n";
943 }
944
945 return buffer.str();
946}
947
948
952
953string ScalingLayer::write_minimum_maximum_expression(const Tensor<string, 1>& inputs_names, const Tensor<string, 1>& outputs_names) const
954{
955 const Index inputs_number = get_neurons_number();
956
957 ostringstream buffer;
958
959 buffer.precision(10);
960
961 for(Index i = 0; i < inputs_number; i++)
962 {
963 buffer << outputs_names(i) << " = 2*(" << inputs_names(i) << "-(" << descriptives(i).minimum << "))/(" << descriptives(i).maximum << "-(" << descriptives(i).minimum << "))-1;\n";
964 }
965
966 return buffer.str();
967}
968
969
973
974string ScalingLayer::write_mean_standard_deviation_expression(const Tensor<string, 1>& inputs_names, const Tensor<string, 1>& outputs_names) const
975{
976 const Index inputs_number = get_neurons_number();
977
978 ostringstream buffer;
979
980 buffer.precision(10);
981
982 for(Index i = 0; i < inputs_number; i++)
983 {
984 buffer << outputs_names(i) << " = (" << inputs_names(i) << "-(" << descriptives(i).mean << "))/" << descriptives(i).standard_deviation << ";\n";
985 }
986
987 return buffer.str();
988}
989
990
994
995string ScalingLayer::write_standard_deviation_expression(const Tensor<string, 1>& inputs_names, const Tensor<string, 1>& outputs_names) const
996{
997 const Index inputs_number = get_neurons_number();
998
999 ostringstream buffer;
1000
1001 buffer.precision(10);
1002
1003 for(Index i = 0; i < inputs_number; i++)
1004 {
1005 buffer << outputs_names(i) << " = " << inputs_names(i) << "/(" << descriptives(i).standard_deviation << ");\n";
1006 }
1007
1008 return buffer.str();
1009}
1010
1011
1013
1014string ScalingLayer::write_expression(const Tensor<string, 1>& inputs_names, const Tensor<string, 1>&) const
1015{
1016 const Index neurons_number = get_neurons_number();
1017
1018 ostringstream buffer;
1019
1020 buffer.precision(10);
1021
1022 for(Index i = 0; i < neurons_number; i++)
1023 {
1024 if(scalers(i) == Scaler::NoScaling)
1025 {
1026 buffer << "scaled_" << inputs_names(i) << " = " << inputs_names(i) << ";\n";
1027 }
1028 else if(scalers(i) == Scaler::MinimumMaximum)
1029 {
1030 buffer << "scaled_" << inputs_names(i) << " = " << inputs_names(i) << "*(" << max_range << "-" << min_range << ")/(" << descriptives(i).maximum << "-(" << descriptives(i).minimum << "))-" << descriptives(i).minimum << "*(" << max_range << "-" << min_range << ")/(" << descriptives(i).maximum << "-" << descriptives(i).minimum << ")+" << min_range << ";\n";
1031 }
1032 else if(scalers(i) == Scaler::MeanStandardDeviation)
1033 {
1034 buffer << "scaled_" << inputs_names(i) << " = (" << inputs_names(i) << "-" << descriptives(i).mean << ")/" << descriptives(i).standard_deviation << ";\n";
1035 }
1036 else if(scalers(i) == Scaler::StandardDeviation)
1037 {
1038 buffer << "scaled_" << inputs_names(i) << " = " << inputs_names(i) << "/(" << descriptives(i).standard_deviation << ");\n";
1039 }
1040 else
1041 {
1042 ostringstream buffer;
1043
1044 buffer << "OpenNN Exception: ScalingLayer class.\n"
1045 << "string write_expression() const method.\n"
1046 << "Unknown inputs scaling method.\n";
1047
1048 throw logic_error(buffer.str());
1049 }
1050 }
1051
1052 string expression = buffer.str();
1053
1054 replace(expression, "+-", "-");
1055 replace(expression, "--", "+");
1056
1057 return expression;
1058
1059}
1060
1061
1064
1066{
1067 const Index neurons_number = get_neurons_number();
1068
1069 ostringstream buffer;
1070
1071 buffer.precision(10);
1072
1073 buffer << "vector<float> " << layer_name << "(const vector<float>& inputs)\n{" << endl;
1074
1075 buffer << "\tvector<float> outputs(" << neurons_number << ");\n" << endl;
1076
1077 for(Index i = 0; i < neurons_number; i++)
1078 {
1079 if(scalers(i) == Scaler::NoScaling)
1080 {
1081 buffer << "\toutputs[" << i << "] = inputs[" << i << "];" << endl;
1082 }
1083 else if(scalers(i) == Scaler::MinimumMaximum)
1084 {
1085 const type slope = (max_range-min_range)/(descriptives(i).maximum-descriptives(i).minimum);
1086
1087 const type intercept = -(descriptives(i).minimum*(max_range-min_range))/(descriptives(i).maximum - descriptives(i).minimum) + min_range;
1088
1089 buffer << "\toutputs[" << i << "] = inputs[" << i << "]*"<<slope<<"+"<<intercept<<";\n";
1090 }
1091 else if(scalers(i) == Scaler::MeanStandardDeviation)
1092 {
1093 const type standard_deviation = descriptives(i).standard_deviation;
1094
1095 const type mean = descriptives(i).mean;
1096
1097 buffer << "\toutputs[" << i << "] = (inputs[" << i << "]-"<<mean<<")/"<<standard_deviation<<";\n";
1098 }
1099 else if(scalers(i) == Scaler::StandardDeviation)
1100 {
1101 const type standard_deviation = descriptives(i).standard_deviation;
1102
1103 buffer << "\toutputs[" << i << "] = inputs[" << i << "]/" << standard_deviation << " ;" << endl;
1104 }
1105 else if(scalers(i) == Scaler::Logarithm)
1106 {
1107 buffer << "\toutputs[" << i << "] = log(inputs[" << i << "])"<< " ;" << endl;
1108 }
1109 else
1110 {
1111 ostringstream buffer;
1112
1113 buffer << "OpenNN Exception: ScalingLayer class.\n"
1114 << "string write_expression() const method.\n"
1115 << "Unknown inputs scaling method.\n";
1116
1117 throw logic_error(buffer.str());
1118 }
1119 }
1120
1121 buffer << "\n\treturn outputs;\n}" << endl;
1122
1123 return buffer.str();
1124}
1125
1126
1127string ScalingLayer::write_expression_python() const
1128{
1129 const Index neurons_number = get_neurons_number();
1130
1131 ostringstream buffer;
1132
1133 buffer.precision(10);
1134
1135 buffer << "\tdef " << layer_name << "(self,inputs):\n" << endl;
1136
1137 buffer << "\t\toutputs = [None] * "<<neurons_number<<"\n" << endl;
1138
1139 for(Index i = 0; i < neurons_number; i++)
1140 {
1141 if(scalers(i) == Scaler::NoScaling)
1142 {
1143 buffer << "\t\toutputs[" << i << "] = inputs[" << i << "]\n" << endl;
1144 }
1145 else if(scalers(i) == Scaler::MinimumMaximum)
1146 {
1147 const type slope = (max_range-min_range)/(descriptives(i).maximum-descriptives(i).minimum);
1148
1149 const type intercept = -(descriptives(i).minimum*(max_range-min_range))/(descriptives(i).maximum - descriptives(i).minimum) + min_range;
1150
1151 buffer << "\t\toutputs[" << i << "] = inputs[" << i << "]*"<<slope<<"+"<<intercept<<"\n";
1152 }
1153 else if(scalers(i) == Scaler::MeanStandardDeviation)
1154 {
1155 const type standard_deviation = descriptives(i).standard_deviation;
1156
1157 const type mean = descriptives(i).mean;
1158
1159 buffer << "\t\toutputs[" << i << "] = (inputs[" << i << "]-"<<mean<<")/"<<standard_deviation<<"\n";
1160 }
1161 else if(scalers(i) == Scaler::StandardDeviation)
1162 {
1163 buffer << "\t\toutputs[" << i << "] = inputs[" << i << "]/" << descriptives(i).standard_deviation << "\n " << endl;
1164 }
1165 else if(scalers(i) == Scaler::Logarithm)
1166 {
1167 buffer << "\t\toutputs[" << i << "] = np.log(inputs[" << i << "])\n"<< endl;
1168 }
1169 else
1170 {
1171 ostringstream buffer;
1172
1173 buffer << "OpenNN Exception: ScalingLayer class.\n"
1174 << "string write_expression() const method.\n"
1175 << "Unknown inputs scaling method.\n";
1176
1177 throw logic_error(buffer.str());
1178 }
1179 }
1180
1181 buffer << "\n\t\treturn outputs;\n" << endl;
1182
1183 return buffer.str();
1184}
1185
1186
1189
1191{
1192 ostringstream buffer;
1193
1194 const Index neurons_number = get_neurons_number();
1195
1196 // Scaling layer
1197
1198 file_stream.OpenElement("ScalingLayer");
1199
1200 // Scaling neurons number
1201
1202 file_stream.OpenElement("ScalingNeuronsNumber");
1203
1204 buffer.str("");
1205 buffer << neurons_number;
1206
1207 file_stream.PushText(buffer.str().c_str());
1208
1209 file_stream.CloseElement();
1210
1211 const Tensor<string, 1> scaling_methods_string = write_scalers();
1212
1213 // Scaling neurons
1214
1215 for(Index i = 0; i < neurons_number; i++)
1216 {
1217 // Scaling neuron
1218
1219 file_stream.OpenElement("ScalingNeuron");
1220
1221 file_stream.PushAttribute("Index", int(i+1));
1222
1223 // Minimum
1224
1225 file_stream.OpenElement("Minimum");
1226
1227 buffer.str("");
1228 buffer << descriptives(i).minimum;
1229
1230 file_stream.PushText(buffer.str().c_str());
1231
1232 file_stream.CloseElement();
1233
1234 // Maximum
1235
1236 file_stream.OpenElement("Maximum");
1237
1238 buffer.str("");
1239 buffer << descriptives(i).maximum;
1240
1241 file_stream.PushText(buffer.str().c_str());
1242
1243 file_stream.CloseElement();
1244
1245 // Mean
1246
1247 file_stream.OpenElement("Mean");
1248
1249 buffer.str("");
1250 buffer << descriptives(i).mean;
1251
1252 file_stream.PushText(buffer.str().c_str());
1253
1254 file_stream.CloseElement();
1255
1256 // Standard deviation
1257
1258 file_stream.OpenElement("StandardDeviation");
1259
1260 buffer.str("");
1261 buffer << descriptives(i).standard_deviation;
1262
1263 file_stream.PushText(buffer.str().c_str());
1264
1265 file_stream.CloseElement();
1266
1267 // Scaler
1268
1269 file_stream.OpenElement("Scaler");
1270
1271 buffer.str("");
1272 buffer << scaling_methods_string(i);
1273
1274 file_stream.PushText(buffer.str().c_str());
1275
1276 file_stream.CloseElement();
1277
1278 // Scaling neuron (end tag)
1279
1280 file_stream.CloseElement();
1281 }
1282
1283 // Scaling layer (end tag)
1284
1285 file_stream.CloseElement();
1286}
1287
1288
1291
1293{
1294 ostringstream buffer;
1295
1296 const tinyxml2::XMLElement* scaling_layer_element = document.FirstChildElement("ScalingLayer");
1297
1298 if(!scaling_layer_element)
1299 {
1300 buffer << "OpenNN Exception: ScalingLayer class.\n"
1301 << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
1302 << "Scaling layer element is nullptr.\n";
1303
1304 throw logic_error(buffer.str());
1305 }
1306
1307 // Scaling neurons number
1308
1309 const tinyxml2::XMLElement* neurons_number_element = scaling_layer_element->FirstChildElement("ScalingNeuronsNumber");
1310
1311 if(!neurons_number_element)
1312 {
1313 buffer << "OpenNN Exception: ScalingLayer class.\n"
1314 << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
1315 << "Scaling neurons number element is nullptr.\n";
1316
1317 throw logic_error(buffer.str());
1318 }
1319
1320 const Index neurons_number = static_cast<Index>(atoi(neurons_number_element->GetText()));
1321
1322 set(neurons_number);
1323
1324 unsigned index = 0; // Index does not work
1325
1326 const tinyxml2::XMLElement* start_element = neurons_number_element;
1327
1328 for(Index i = 0; i < neurons_number; i++)
1329 {
1330 const tinyxml2::XMLElement* scaling_neuron_element = start_element->NextSiblingElement("ScalingNeuron");
1331 start_element = scaling_neuron_element;
1332
1333 if(!scaling_neuron_element)
1334 {
1335 buffer << "OpenNN Exception: ScalingLayer class.\n"
1336 << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
1337 << "Scaling neuron " << i+1 << " is nullptr.\n";
1338
1339 throw logic_error(buffer.str());
1340 }
1341
1342 scaling_neuron_element->QueryUnsignedAttribute("Index", &index);
1343
1344 if(index != i+1)
1345 {
1346 buffer << "OpenNN Exception: ScalingLayer class.\n"
1347 << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
1348 << "Index " << index << " is not correct.\n";
1349
1350 throw logic_error(buffer.str());
1351 }
1352
1353 // Minimum
1354
1355 const tinyxml2::XMLElement* minimum_element = scaling_neuron_element->FirstChildElement("Minimum");
1356
1357 if(!minimum_element)
1358 {
1359 buffer << "OpenNN Exception: ScalingLayer class.\n"
1360 << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
1361 << "Minimum element " << i+1 << " is nullptr.\n";
1362
1363 throw logic_error(buffer.str());
1364 }
1365
1366 if(minimum_element->GetText())
1367 {
1368 descriptives[i].minimum = static_cast<type>(atof(minimum_element->GetText()));
1369 }
1370
1371 // Maximum
1372
1373 const tinyxml2::XMLElement* maximum_element = scaling_neuron_element->FirstChildElement("Maximum");
1374
1375 if(!maximum_element)
1376 {
1377 buffer << "OpenNN Exception: ScalingLayer class.\n"
1378 << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
1379 << "Maximum element " << i+1 << " is nullptr.\n";
1380
1381 throw logic_error(buffer.str());
1382 }
1383
1384 if(maximum_element->GetText())
1385 {
1386 descriptives[i].maximum = static_cast<type>(atof(maximum_element->GetText()));
1387 }
1388
1389 // Mean
1390
1391 const tinyxml2::XMLElement* mean_element = scaling_neuron_element->FirstChildElement("Mean");
1392
1393 if(!mean_element)
1394 {
1395 buffer << "OpenNN Exception: ScalingLayer class.\n"
1396 << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
1397 << "Mean element " << i+1 << " is nullptr.\n";
1398
1399 throw logic_error(buffer.str());
1400 }
1401
1402 if(mean_element->GetText())
1403 {
1404 descriptives[i].mean = static_cast<type>(atof(mean_element->GetText()));
1405 }
1406
1407 // Standard deviation
1408
1409 const tinyxml2::XMLElement* standard_deviation_element = scaling_neuron_element->FirstChildElement("StandardDeviation");
1410
1411 if(!standard_deviation_element)
1412 {
1413 buffer << "OpenNN Exception: ScalingLayer class.\n"
1414 << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
1415 << "Standard deviation element " << i+1 << " is nullptr.\n";
1416
1417 throw logic_error(buffer.str());
1418 }
1419
1420 if(standard_deviation_element->GetText())
1421 {
1422 descriptives[i].standard_deviation = static_cast<type>(atof(standard_deviation_element->GetText()));
1423 }
1424
1425 // Scaling method
1426
1427 const tinyxml2::XMLElement* scaling_method_element = scaling_neuron_element->FirstChildElement("Scaler");
1428
1429 if(!scaling_method_element)
1430 {
1431 buffer << "OpenNN Exception: ScalingLayer class.\n"
1432 << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
1433 << "Scaling method element " << i+1 << " is nullptr.\n";
1434
1435 throw logic_error(buffer.str());
1436 }
1437
1438 string new_method = scaling_method_element->GetText();
1439
1440 if(new_method == "NoScaling" || new_method == "No Scaling")
1441 {
1442 scalers[i] = Scaler::NoScaling;
1443 }
1444 else if(new_method == "MinimumMaximum" || new_method == "Minimum - Maximum")
1445 {
1446 scalers[i] = Scaler::MinimumMaximum;
1447 }
1448 else if(new_method == "MeanStandardDeviation" || new_method == "Mean - Standard deviation")
1449 {
1450 scalers[i] = Scaler::MeanStandardDeviation;
1451 }
1452 else if(new_method == "StandardDeviation")
1453 {
1454 scalers[i] = Scaler::StandardDeviation;
1455 }
1456 else if(new_method == "Logarithm")
1457 {
1458 scalers[i] = Scaler::Logarithm;
1459 }
1460 else
1461 {
1462 scalers[i] = Scaler::NoScaling;
1463 }
1464 }
1465
1466 // Display
1467 {
1468 const tinyxml2::XMLElement* display_element = scaling_layer_element->FirstChildElement("Display");
1469
1470 if(display_element)
1471 {
1472 string new_display_string = display_element->GetText();
1473
1474 try
1475 {
1476 set_display(new_display_string != "0");
1477 }
1478 catch(const logic_error& e)
1479 {
1480 cerr << e.what() << endl;
1481 }
1482 }
1483 }
1484}
1485
1486}
1487
1488// OpenNN: Open Neural Networks Library.
1489// Copyright(C) 2005-2021 Artificial Intelligence Techniques, SL.
1490//
1491// This library is free software; you can redistribute it and/or
1492// modify it under the terms of the GNU Lesser General Public
1493// License as published by the Free Software Foundation; either
1494// version 2.1 of the License, or any later version.
1495//
1496// This library is distributed in the hope that it will be useful,
1497// but WITHOUT ANY WARRANTY; without even the implied warranty of
1498// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
1499// Lesser General Public License for more details.
1500
1501// You should have received a copy of the GNU Lesser General Public
1502// License along with this library; if not, write to the Free Software
1503// Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
This abstract class represents the concept of layer of neurons in OpenNN.
Definition: layer.h:53
string layer_name
Layer name.
Definition: layer.h:179
Type layer_type
Layer type.
Definition: layer.h:183
string write_expression_c() const
write_expression_c
void set_maximum(const Index &, const type &)
void set_descriptives(const Tensor< Descriptives, 1 > &)
void set_item_descriptives(const Index &, const Descriptives &)
Tensor< type, 1 > get_means() const
Returns a single matrix with the means of all scaling neurons.
void set_minimum(const Index &, const type &)
string write_mean_standard_deviation_expression(const Tensor< string, 1 > &, const Tensor< string, 1 > &) const
Tensor< string, 1 > write_scalers() const
Returns a vector of strings with the name of the method used for each scaling neuron.
const bool & get_display() const
void set_standard_deviation(const Index &, const type &)
Index get_inputs_number() const
Returns the number of inputs.
string write_standard_deviation_expression(const Tensor< string, 1 > &, const Tensor< string, 1 > &) const
string write_expression(const Tensor< string, 1 > &, const Tensor< string, 1 > &) const
Returns a string with the expression of the inputs scaling process.
virtual void from_XML(const tinyxml2::XMLDocument &)
Tensor< Scaler, 1 > scalers
Vector of scaling methods for each variable.
void check_range(const Tensor< type, 1 > &) const
bool display
Display warning messages to screen.
bool is_empty() const
Returns true if the number of scaling neurons is zero, and false otherwise.
virtual ~ScalingLayer()
Destructor.
void set()
Sets the scaling layer to be empty.
Tensor< type, 1 > get_minimums() const
Returns a single matrix with the minimums of all scaling neurons.
string write_minimum_maximum_expression(const Tensor< string, 1 > &, const Tensor< string, 1 > &) const
Tensor< type, 1 > get_maximums() const
Returns a single matrix with the maximums of all scaling neurons.
string write_no_scaling_expression(const Tensor< string, 1 > &, const Tensor< string, 1 > &) const
void set_scalers(const Tensor< Scaler, 1 > &)
Tensor< type, 2 > calculate_outputs(const Tensor< type, 2 > &)
Tensor< Descriptives, 1 > get_descriptives() const
type min_range
Minimum and maximum of the output range used by min–max scaling.
const Tensor< Scaler, 1 > get_scaling_methods() const
Returns the methods used for scaling.
Tensor< Descriptives, 1 > descriptives
Descriptives of input variables.
Tensor< string, 1 > write_scalers_text() const
void set_display(const bool &)
Tensor< type, 1 > get_standard_deviations() const
Returns a single matrix with the standard deviations of all scaling neurons.
void write_XML(tinyxml2::XMLPrinter &) const
void set_mean(const Index &, const type &)
void set_min_max_range(const type &min, const type &max)
XMLError QueryUnsignedAttribute(const char *name, unsigned int *value) const
See QueryIntAttribute()
Definition: tinyxml2.h:1328
const XMLElement * NextSiblingElement(const char *name=nullptr) const
Get the next(right) sibling element of this node, with an optionally supplied name.
Definition: tinyxml2.cpp:1059
void PushText(const char *text, bool cdata=false)
Add a text node.
Definition: tinyxml2.cpp:2878
void PushAttribute(const char *name, const char *value)
If streaming, add an attribute to an open element.
Definition: tinyxml2.cpp:2783
virtual void CloseElement(bool compactMode=false)
If streaming, close the Element.
Definition: tinyxml2.cpp:2834
HALF_CONSTEXPR half abs(half arg)
Definition: half.hpp:2735
half log(half arg)
Definition: half.hpp:3050
This structure contains the simplest Descriptives for a set, variable, etc. It includes :
Definition: statistics.h:40