unscaling_layer.cpp
// OpenNN: Open Neural Networks Library
// www.opennn.net
//
// U N S C A L I N G   L A Y E R   C L A S S
//
// Artificial Intelligence Techniques SL
// artelnics@artelnics.com

#include "unscaling_layer.h"

namespace OpenNN
{

/// Default constructor.

UnscalingLayer::UnscalingLayer() : Layer()
{
    set();
}


/// Neurons number constructor.

UnscalingLayer::UnscalingLayer(const Index& new_neurons_number) : Layer()
{
    set(new_neurons_number);
}


/// Descriptives constructor.

UnscalingLayer::UnscalingLayer(const Tensor<Descriptives, 1>& new_descriptives) : Layer()
{
    set(new_descriptives);
}


/// Destructor.

UnscalingLayer::~UnscalingLayer()
{
}


/// Returns the number of inputs.

Index UnscalingLayer::get_inputs_number() const
{
    return descriptives.size();
}


/// Returns the number of unscaling neurons in this layer.

Index UnscalingLayer::get_neurons_number() const
{
    return descriptives.size();
}
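
// Usage sketch (illustrative only, not part of the library; variable names are
// hypothetical): building an unscaling layer from the descriptives of two
// target variables and querying its size.
//
//     Tensor<Descriptives, 1> targets_descriptives(2);
//     targets_descriptives(0).minimum = type(0);    targets_descriptives(0).maximum = type(100);
//     targets_descriptives(1).minimum = type(-5);   targets_descriptives(1).maximum = type(5);
//
//     UnscalingLayer unscaling_layer(targets_descriptives);
//
//     const Index neurons_number = unscaling_layer.get_neurons_number();   // 2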


/// Returns the descriptives (minimum, maximum, mean and standard deviation)
/// of the output variables of the layer.

Tensor<Descriptives, 1> UnscalingLayer::get_descriptives() const
{
    return descriptives;
}


/// Returns a vector with the minimum values of all the unscaling neurons.

Tensor<type, 1> UnscalingLayer::get_minimums() const
{
    const Index neurons_number = get_neurons_number();

    Tensor<type, 1> minimums(neurons_number);

    for(Index i = 0; i < neurons_number; i++)
    {
        minimums[i] = descriptives[i].minimum;
    }

    return minimums;
}


/// Returns a vector with the maximum values of all the unscaling neurons.

Tensor<type, 1> UnscalingLayer::get_maximums() const
{
    const Index neurons_number = get_neurons_number();

    Tensor<type, 1> maximums(neurons_number);

    for(Index i = 0; i < neurons_number; i++)
    {
        maximums[i] = descriptives[i].maximum;
    }

    return maximums;
}


/// Returns the unscaling method used for each output variable.

const Tensor<Scaler, 1> UnscalingLayer::get_unscaling_method() const
{
    return scalers;
}


/// Returns a string with the mathematical expression of the unscaling process.

string UnscalingLayer::write_expression(const Tensor<string, 1>& inputs_names, const Tensor<string, 1>& outputs_names) const
{
    const Index neurons_number = get_neurons_number();

    ostringstream buffer;

    buffer.precision(10);

    for(Index i = 0; i < neurons_number; i++)
    {
        if(scalers(i) == Scaler::NoScaling)
        {
            buffer << outputs_names(i) << " = " << inputs_names(i) << ";\n";
        }
        else if(scalers(i) == Scaler::MinimumMaximum)
        {
            buffer << outputs_names(i) << " = " << inputs_names(i) << "*(" << descriptives(i).maximum << "-" << descriptives(i).minimum << ")/(" << max_range << "-" << min_range << ")+" << descriptives(i).minimum << "-" << min_range << "*(" << descriptives(i).maximum << "-" << descriptives(i).minimum << ")/(" << max_range << "-" << min_range << ");\n";
        }
        else if(scalers(i) == Scaler::MeanStandardDeviation)
        {
            buffer << outputs_names(i) << " = " << inputs_names(i) << "*" << descriptives(i).standard_deviation << "+" << descriptives(i).mean << ";\n";
        }
        else if(scalers(i) == Scaler::StandardDeviation)
        {
            buffer << outputs_names(i) << " = " << inputs_names(i) << "*" << descriptives(i).standard_deviation << ";\n";
        }
        else if(scalers(i) == Scaler::Logarithm)
        {
            buffer << outputs_names(i) << " = exp(" << inputs_names(i) << ");\n";
        }
        else
        {
            ostringstream buffer;

            buffer << "OpenNN Exception: UnscalingLayer class.\n"
                   << "string write_expression() const method.\n"
                   << "Unknown unscaling method.\n";

            throw logic_error(buffer.str());
        }
    }

    string expression = buffer.str();

    replace(expression, "+-", "-");
    replace(expression, "--", "+");

    return expression;
}
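
// For illustration (hedged example): for a single MinimumMaximum output named
// "price", with minimum 0, maximum 100, the default range [-1, 1] and a scaled
// input named "scaled_price", the returned expression reads roughly
//
//     price = scaled_price*(100-0)/(1+1)+0+1*(100-0)/(1+1);
//
// which simplifies to price = scaled_price*50 + 50.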


/// Returns a vector of strings with the name of the method used for each unscaling neuron.

Tensor<string, 1> UnscalingLayer::write_unscaling_methods() const
{
    const Index neurons_number = get_neurons_number();

    Tensor<string, 1> scaling_methods_strings(neurons_number);

    for(Index i = 0; i < neurons_number; i++)
    {
        if(scalers[i] == Scaler::NoScaling)
        {
            scaling_methods_strings[i] = "NoScaling";
        }
        else if(scalers[i] == Scaler::MinimumMaximum)
        {
            scaling_methods_strings[i] = "MinimumMaximum";
        }
        else if(scalers[i] == Scaler::MeanStandardDeviation)
        {
            scaling_methods_strings[i] = "MeanStandardDeviation";
        }
        else if(scalers[i] == Scaler::StandardDeviation)
        {
            scaling_methods_strings[i] = "StandardDeviation";
        }
        else if(scalers[i] == Scaler::Logarithm)
        {
            scaling_methods_strings[i] = "Logarithm";
        }
        else
        {
            ostringstream buffer;

            buffer << "OpenNN Exception: UnscalingLayer class.\n"
                   << "Tensor<string, 1> write_unscaling_methods() const.\n"
                   << "Unknown unscaling method.\n";

            throw logic_error(buffer.str());
        }
    }

    return scaling_methods_strings;
}


/// Returns a vector of strings with the description of the method used for each unscaling neuron.

Tensor<string, 1> UnscalingLayer::write_unscaling_method_text() const
{
    const Index neurons_number = get_neurons_number();

    Tensor<string, 1> scaling_methods_strings(neurons_number);

    for(Index i = 0; i < neurons_number; i++)
    {
        if(scalers[i] == Scaler::NoScaling)
        {
            scaling_methods_strings[i] = "no unscaling";
        }
        else if(scalers[i] == Scaler::MinimumMaximum)
        {
            scaling_methods_strings[i] = "minimum and maximum";
        }
        else if(scalers[i] == Scaler::MeanStandardDeviation)
        {
            scaling_methods_strings[i] = "mean and standard deviation";
        }
        else if(scalers[i] == Scaler::StandardDeviation)
        {
            scaling_methods_strings[i] = "standard deviation";
        }
        else if(scalers[i] == Scaler::Logarithm)
        {
            scaling_methods_strings[i] = "logarithm";
        }
        else
        {
            ostringstream buffer;

            buffer << "OpenNN Exception: UnscalingLayer class.\n"
                   << "Tensor<string, 1> write_unscaling_method_text() const.\n"
                   << "Unknown unscaling method.\n";

            throw logic_error(buffer.str());
        }
    }

    return scaling_methods_strings;
}


/// Returns true if messages from this class are displayed on the screen, and false otherwise.

const bool& UnscalingLayer::get_display() const
{
    return display;
}


/// Sets the unscaling layer to be empty.

void UnscalingLayer::set()
{
    descriptives.resize(0);

    scalers.resize(0);

    set_default();
}


/// Sets a new number of inputs (and therefore of unscaling neurons) in the layer.

void UnscalingLayer::set_inputs_number(const Index& new_inputs_number)
{
    descriptives.resize(new_inputs_number);
}


/// Sets a new number of unscaling neurons in the layer.

void UnscalingLayer::set_neurons_number(const Index& new_neurons_number)
{
    descriptives.resize(new_neurons_number);
}


/// Sets a new size for the unscaling layer and initializes the members to their default values.
/// @param new_neurons_number Number of unscaling neurons.

void UnscalingLayer::set(const Index& new_neurons_number)
{
    descriptives.resize(new_neurons_number);

    scalers.resize(new_neurons_number);

    scalers.setConstant(Scaler::MinimumMaximum);

    set_default();
}


/// Sets the size of the unscaling layer and the descriptives of the output variables,
/// and initializes the rest of the members to their default values.
/// @param new_descriptives Descriptives of the output variables.

void UnscalingLayer::set(const Tensor<Descriptives, 1>& new_descriptives)
{
    descriptives = new_descriptives;

    scalers.resize(new_descriptives.size());

    scalers.setConstant(Scaler::MinimumMaximum);

    set_default();
}


void UnscalingLayer::set(const Tensor<Descriptives, 1>& new_descriptives, const Tensor<Scaler, 1>& new_scalers)
{
    descriptives = new_descriptives;

    scalers = new_scalers;
}


/// Sets the members of this unscaling layer from an XML document.
/// @param new_unscaling_layer_document TinyXML document containing the member data.

void UnscalingLayer::set(const tinyxml2::XMLDocument& new_unscaling_layer_document)
{
    set_default();

    from_XML(new_unscaling_layer_document);
}


/// Sets the members of this object to be the members of another object of the same class.

void UnscalingLayer::set(const UnscalingLayer& new_unscaling_layer)
{
    descriptives = new_unscaling_layer.descriptives;

    scalers = new_unscaling_layer.scalers;

    display = new_unscaling_layer.display;
}


/// Sets the members of this object to their default values:
/// minimum-maximum unscaling, unscaling range [-1, 1], display true and layer type Unscaling.

void UnscalingLayer::set_default()
{
    layer_name = "unscaling_layer";

    set_scalers(Scaler::MinimumMaximum);

    set_min_max_range(type(-1), type(1));

    set_display(true);

    layer_type = Type::Unscaling;
}


/// Sets the minimum and maximum values of the range used by the minimum-maximum unscaling method.

void UnscalingLayer::set_min_max_range(const type min, const type max)
{
    min_range = min;
    max_range = max;
}


/// Sets new descriptives for the output variables.
/// The size of the given vector must be equal to the number of unscaling neurons.
/// @param new_descriptives Descriptives of the output variables.

void UnscalingLayer::set_descriptives(const Tensor<Descriptives, 1>& new_descriptives)
{
#ifdef OPENNN_DEBUG

    const Index neurons_number = get_neurons_number();

    const Index new_descriptives_size = new_descriptives.size();

    if(new_descriptives_size != neurons_number)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: UnscalingLayer class.\n"
               << "void set_descriptives(const Tensor<Descriptives, 1>&) method.\n"
               << "Size of descriptives (" << new_descriptives_size << ") must be equal to number of unscaling neurons (" << neurons_number << ").\n";

        throw logic_error(buffer.str());
    }

#endif

    // Set all descriptives

    descriptives = new_descriptives;
}


/// Sets the descriptives of a single unscaling neuron.
/// @param i Index of the unscaling neuron.
/// @param item_descriptives Descriptives for that neuron.

void UnscalingLayer::set_item_descriptives(const Index& i, const Descriptives& item_descriptives)
{
    descriptives[i] = item_descriptives;
}


/// Sets the minimum value of a single unscaling neuron.
/// @param i Index of the unscaling neuron.
/// @param new_minimum New minimum value.

void UnscalingLayer::set_minimum(const Index& i, const type& new_minimum)
{
    descriptives[i].set_minimum(new_minimum);
}


/// Sets the maximum value of a single unscaling neuron.
/// @param i Index of the unscaling neuron.
/// @param new_maximum New maximum value.

void UnscalingLayer::set_maximum(const Index& i, const type& new_maximum)
{
    descriptives[i].set_maximum(new_maximum);
}


/// Sets the mean value of a single unscaling neuron.
/// @param i Index of the unscaling neuron.
/// @param new_mean New mean value.

void UnscalingLayer::set_mean(const Index& i, const type& new_mean)
{
    descriptives[i].set_mean(new_mean);
}


/// Sets the standard deviation of a single unscaling neuron.
/// @param i Index of the unscaling neuron.
/// @param new_standard_deviation New standard deviation value.

void UnscalingLayer::set_standard_deviation(const Index& i, const type& new_standard_deviation)
{
    descriptives[i].set_standard_deviation(new_standard_deviation);
}


/// Sets the unscaling method to be used for each output variable.
/// @param new_unscaling_method Vector of unscaling methods, one per unscaling neuron.

void UnscalingLayer::set_scalers(const Tensor<Scaler,1>& new_unscaling_method)
{
    scalers = new_unscaling_method;
}


/// Sets the same unscaling method for all the neurons in the layer from a string.
/// The available methods are NoScaling, MinimumMaximum, MeanStandardDeviation, StandardDeviation and Logarithm.
/// @param new_scaling_methods_string Name of the unscaling method.

void UnscalingLayer::set_scalers(const string& new_scaling_methods_string)
{
#ifdef OPENNN_DEBUG

    const Index neurons_number = get_neurons_number();

    if(neurons_number == 0)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: UnscalingLayer class.\n"
               << "set_scalers(const string& new_scaling_methods_string) method.\n"
               << "Neurons number (" << neurons_number << ") must be greater than 0.\n";

        throw logic_error(buffer.str());
    }

#endif

    if(new_scaling_methods_string == "NoScaling")
    {
        set_scalers(Scaler::NoScaling);
    }
    else if(new_scaling_methods_string == "MinimumMaximum")
    {
        set_scalers(Scaler::MinimumMaximum);
    }
    else if(new_scaling_methods_string == "MeanStandardDeviation")
    {
        set_scalers(Scaler::MeanStandardDeviation);
    }
    else if(new_scaling_methods_string == "StandardDeviation")
    {
        set_scalers(Scaler::StandardDeviation);
    }
    else if(new_scaling_methods_string == "Logarithm")
    {
        set_scalers(Scaler::Logarithm);
    }
    else
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: UnscalingLayer class.\n"
               << "set_scalers(const string& new_scaling_methods_string) method.\n"
               << "Unknown unscaling method: " << new_scaling_methods_string << ".\n";

        throw logic_error(buffer.str());
    }
}
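
// Usage sketch (illustrative; "unscaling_layer" is a hypothetical, already
// configured object): the unscaling method can be set by name for the whole
// layer, or per neuron with the Tensor<string, 1> overload below.
//
//     unscaling_layer.set_scalers("MinimumMaximum");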


/// Sets the unscaling method of each neuron in the layer from a vector of strings.
/// The size of the vector must be equal to the number of unscaling neurons.
/// @param new_unscaling_methods_string Vector with the names of the unscaling methods.

void UnscalingLayer::set_scalers(const Tensor<string, 1>& new_unscaling_methods_string)
{
    const Index neurons_number = get_neurons_number();

#ifdef OPENNN_DEBUG

    if(neurons_number == 0)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: UnscalingLayer class.\n"
               << "void set_scalers(const Tensor<string, 1>&) method.\n"
               << "Neurons number (" << neurons_number << ") must be greater than 0.\n";

        throw logic_error(buffer.str());
    }

#endif

    Tensor<Scaler, 1> new_unscaling_methods(neurons_number);

    for(Index i = 0; i < neurons_number; i++)
    {
        if(new_unscaling_methods_string(i) == "NoScaling")
        {
            new_unscaling_methods(i) = Scaler::NoScaling;
        }
        else if(new_unscaling_methods_string(i) == "MeanStandardDeviation")
        {
            new_unscaling_methods(i) = Scaler::MeanStandardDeviation;
        }
        else if(new_unscaling_methods_string(i) == "StandardDeviation")
        {
            new_unscaling_methods(i) = Scaler::StandardDeviation;
        }
        else if(new_unscaling_methods_string(i) == "MinimumMaximum")
        {
            new_unscaling_methods(i) = Scaler::MinimumMaximum;
        }
        else if(new_unscaling_methods_string(i) == "Logarithm")
        {
            new_unscaling_methods(i) = Scaler::Logarithm;
        }
        else
        {
            ostringstream buffer;

            buffer << "OpenNN Exception: UnscalingLayer class.\n"
                   << "void set_scalers(const Tensor<string, 1>&) method.\n"
                   << "Unknown unscaling method: " << new_unscaling_methods_string(i) << ".\n";

            throw logic_error(buffer.str());
        }
    }

    set_scalers(new_unscaling_methods);
}


/// Sets the same unscaling method for all the neurons in the layer.
/// @param new_unscaling_method Unscaling method.

void UnscalingLayer::set_scalers(const Scaler& new_unscaling_method)
{
    const Index neurons_number = get_neurons_number();

    for(Index i = 0; i < neurons_number; i++)
    {
        scalers(i) = new_unscaling_method;
    }
}


/// Sets a new display value.
/// If it is set to true messages from this class are displayed on the screen;
/// if it is set to false messages from this class are not displayed on the screen.
/// @param new_display Display value.

void UnscalingLayer::set_display(const bool& new_display)
{
    display = new_display;
}



/// Checks whether a given set of outputs falls inside the range defined by the descriptives,
/// and displays a warning for each output variable that lies outside it.
/// @param outputs Vector of output values.

void UnscalingLayer::check_range(const Tensor<type, 1>& outputs) const
{
    const Index neurons_number = get_neurons_number();

#ifdef OPENNN_DEBUG

    const Index size = outputs.size();

    if(size != neurons_number)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: UnscalingLayer class.\n"
               << "void check_range(const Tensor<type, 1>&) const method.\n"
               << "Size of outputs must be equal to number of unscaling neurons.\n";

        throw logic_error(buffer.str());
    }

#endif

    // Check outputs

    if(display)
    {
        for(Index i = 0; i < neurons_number; i++)
        {
            if(outputs[i] < descriptives[i].minimum)
            {
                cout << "OpenNN Warning: UnscalingLayer class.\n"
                     << "void check_range(const Tensor<type, 1>&) const method.\n"
                     << "Output variable " << i << " is less than the minimum.\n";
            }

            if(outputs[i] > descriptives[i].maximum)
            {
                cout << "OpenNN Warning: UnscalingLayer class.\n"
                     << "void check_range(const Tensor<type, 1>&) const method.\n"
                     << "Output variable " << i << " is greater than the maximum.\n";
            }
        }
    }
}


/// Returns true if the number of unscaling neurons is zero, and false otherwise.

bool UnscalingLayer::is_empty() const
{
    const Index neurons_number = get_neurons_number();

    if(neurons_number == 0)
    {
        return true;
    }
    else
    {
        return false;
    }
}


/// Calculates the outputs of the unscaling layer for a batch of inputs, that is,
/// it maps the scaled values back to the original ranges of the output variables.
/// @param inputs Matrix of scaled values, one row per sample and one column per unscaling neuron.

Tensor<type, 2> UnscalingLayer::calculate_outputs(const Tensor<type, 2>& inputs)
{
    Tensor<type, 2> outputs;

    const Index neurons_number = get_neurons_number();

#ifdef OPENNN_DEBUG

    ostringstream buffer;

    const Index columns_number = inputs.dimension(1);

    if(columns_number != neurons_number)
    {
        buffer << "OpenNN Exception: UnscalingLayer class.\n"
               << "Tensor<type, 2> calculate_outputs(const Tensor<type, 2>&) const method.\n"
               << "Size of inputs (" << columns_number << ") must be equal to number of unscaling neurons (" << neurons_number << ").\n";

        throw logic_error(buffer.str());
    }

#endif

    const Index points_number = inputs.dimension(0);

    outputs.resize(points_number, neurons_number);

    for(Index i = 0; i < points_number; i++)
    {
        for(Index j = 0; j < neurons_number; j++)
        {
            if(abs(descriptives(j).minimum - descriptives(j).maximum) < type(NUMERIC_LIMITS_MIN))
            {
                if(display)
                {
                    cout << "OpenNN Warning: UnscalingLayer class.\n"
                         << "Tensor<type, 2> calculate_outputs(const Tensor<type, 2>&) const method.\n"
                         << "Minimum and maximum values of variable " << j << " are equal.\n"
                         << "That variable won't be unscaled.\n";
                }

                outputs(i,j) = inputs(i,j);
            }
            else
            {
                if(scalers(j) == Scaler::NoScaling)
                {
                    outputs(i,j) = inputs(i,j);
                }
                else if(scalers(j) == Scaler::MinimumMaximum)
                {
                    const type slope = (descriptives(j).maximum-descriptives(j).minimum)/(max_range-min_range);

                    const type intercept = -(min_range*descriptives(j).maximum-max_range*descriptives(j).minimum)/(max_range-min_range);

                    outputs(i,j) = inputs(i,j)*slope + intercept;
                }
                else if(scalers(j) == Scaler::MeanStandardDeviation)
                {
                    const type slope = descriptives(j).standard_deviation;

                    const type intercept = descriptives(j).mean;

                    outputs(i,j) = inputs(i,j)*slope + intercept;
                }
                else if(scalers(j) == Scaler::StandardDeviation)
                {
                    const type standard_deviation = descriptives(j).standard_deviation;

                    outputs(i,j) = inputs(i,j)*standard_deviation;
                }
                else if(scalers(j) == Scaler::Logarithm)
                {
                    outputs(i,j) = exp(inputs(i,j));
                }
                else
                {
                    ostringstream buffer;

                    buffer << "OpenNN Exception: UnscalingLayer class.\n"
                           << "Tensor<type, 2> calculate_outputs(const Tensor<type, 2>&) const method.\n"
                           << "Unknown unscaling method.\n";

                    throw logic_error(buffer.str());
                }
            }
        }
    }

    return outputs;
}
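
// Worked example (illustrative): with the default range [min_range, max_range] = [-1, 1]
// and an output variable whose descriptives are minimum = 0 and maximum = 100, the
// MinimumMaximum branch above computes
//
//     slope     = (100 - 0)/(1 - (-1))          = 50
//     intercept = -((-1)*100 - 1*0)/(1 - (-1))  = 50
//
// so a scaled network output of 0.2 is unscaled to 0.2*50 + 50 = 60, which is the
// inverse of scaling 60 from [0, 100] into [-1, 1].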


/// Serializes the unscaling layer object into an XML document of the TinyXML library
/// without keeping the DOM tree in memory.
/// @param file_stream TinyXML printer used to write the document.

void UnscalingLayer::write_XML(tinyxml2::XMLPrinter& file_stream) const
{
    ostringstream buffer;

    const Index neurons_number = get_neurons_number();

    // Unscaling layer

    file_stream.OpenElement("UnscalingLayer");

    // Unscaling neurons number

    file_stream.OpenElement("UnscalingNeuronsNumber");

    buffer.str("");
    buffer << neurons_number;

    file_stream.PushText(buffer.str().c_str());

    file_stream.CloseElement();

    // Descriptives

    const Tensor<string, 1> scalers = write_unscaling_methods();

    for(Index i = 0; i < neurons_number; i++)
    {
        file_stream.OpenElement("Descriptives");

        file_stream.PushAttribute("Index", int(i+1));

        // Minimum

        file_stream.OpenElement("Minimum");

        buffer.str("");
        buffer << descriptives[i].minimum;

        file_stream.PushText(buffer.str().c_str());

        file_stream.CloseElement();

        // Maximum

        file_stream.OpenElement("Maximum");

        buffer.str("");
        buffer << descriptives[i].maximum;

        file_stream.PushText(buffer.str().c_str());

        file_stream.CloseElement();

        // Mean

        file_stream.OpenElement("Mean");

        buffer.str("");
        buffer << descriptives[i].mean;

        file_stream.PushText(buffer.str().c_str());

        file_stream.CloseElement();

        // Standard deviation

        file_stream.OpenElement("StandardDeviation");

        buffer.str("");
        buffer << descriptives[i].standard_deviation;

        file_stream.PushText(buffer.str().c_str());

        file_stream.CloseElement();

        // Unscaling method

        file_stream.OpenElement("Scaler");

        buffer.str("");
        buffer << scalers(i);

        file_stream.PushText(buffer.str().c_str());

        file_stream.CloseElement();

        // Unscaling neuron (end tag)

        file_stream.CloseElement();
    }

    // Unscaling layer (end tag)

    file_stream.CloseElement();
}
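
// For illustration (hedged example): for a single MinimumMaximum neuron with minimum 0,
// maximum 100, mean 50 and standard deviation 25, the printer receives a fragment of the form
//
//     <UnscalingLayer>
//         <UnscalingNeuronsNumber>1</UnscalingNeuronsNumber>
//         <Descriptives Index="1">
//             <Minimum>0</Minimum>
//             <Maximum>100</Maximum>
//             <Mean>50</Mean>
//             <StandardDeviation>25</StandardDeviation>
//             <Scaler>MinimumMaximum</Scaler>
//         </Descriptives>
//     </UnscalingLayer>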


// void from_XML(const tinyxml2::XMLDocument&) method

/// Deserializes a TinyXML document into this unscaling layer object.
/// @param document XML document containing the member data.

void UnscalingLayer::from_XML(const tinyxml2::XMLDocument& document)
{
    ostringstream buffer;

    const tinyxml2::XMLElement* root_element = document.FirstChildElement("UnscalingLayer");

    if(!root_element)
    {
        buffer << "OpenNN Exception: UnscalingLayer class.\n"
               << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
               << "Unscaling layer element is nullptr.\n";

        throw logic_error(buffer.str());
    }

    // Unscaling neurons number

    const tinyxml2::XMLElement* neurons_number_element = root_element->FirstChildElement("UnscalingNeuronsNumber");

    if(!neurons_number_element)
    {
        buffer << "OpenNN Exception: UnscalingLayer class.\n"
               << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
               << "Unscaling neurons number element is nullptr.\n";

        throw logic_error(buffer.str());
    }

    const Index neurons_number = static_cast<Index>(atoi(neurons_number_element->GetText()));

    set(neurons_number);

    unsigned index = 0; // Index does not work

    const tinyxml2::XMLElement* start_element = neurons_number_element;

    for(Index i = 0; i < neurons_number; i++)
    {
        const tinyxml2::XMLElement* descriptives_element = start_element->NextSiblingElement("Descriptives");
        start_element = descriptives_element;

        if(!descriptives_element)
        {
            buffer << "OpenNN Exception: UnscalingLayer class.\n"
                   << "void from_XML(const tinyxml2::XMLElement*) method.\n"
                   << "Descriptives of unscaling neuron " << i+1 << " is nullptr.\n";

            throw logic_error(buffer.str());
        }

        descriptives_element->QueryUnsignedAttribute("Index", &index);

        if(index != i+1)
        {
            buffer << "OpenNN Exception: UnscalingLayer class.\n"
                   << "void from_XML(const tinyxml2::XMLElement*) method.\n"
                   << "Index " << index << " is not correct.\n";

            throw logic_error(buffer.str());
        }

        // Minimum

        const tinyxml2::XMLElement* minimum_element = descriptives_element->FirstChildElement("Minimum");

        if(!minimum_element)
        {
            buffer << "OpenNN Exception: UnscalingLayer class.\n"
                   << "void from_XML(const tinyxml2::XMLElement*) method.\n"
                   << "Minimum element " << i+1 << " is nullptr.\n";

            throw logic_error(buffer.str());
        }

        if(minimum_element->GetText())
        {
            descriptives(i).minimum = static_cast<type>(atof(minimum_element->GetText()));
        }

        // Maximum

        const tinyxml2::XMLElement* maximum_element = descriptives_element->FirstChildElement("Maximum");

        if(!maximum_element)
        {
            buffer << "OpenNN Exception: UnscalingLayer class.\n"
                   << "void from_XML(const tinyxml2::XMLElement*) method.\n"
                   << "Maximum element " << i+1 << " is nullptr.\n";

            throw logic_error(buffer.str());
        }

        if(maximum_element->GetText())
        {
            descriptives(i).maximum = static_cast<type>(atof(maximum_element->GetText()));
        }

        // Mean

        const tinyxml2::XMLElement* mean_element = descriptives_element->FirstChildElement("Mean");

        if(!mean_element)
        {
            buffer << "OpenNN Exception: UnscalingLayer class.\n"
                   << "void from_XML(const tinyxml2::XMLElement*) method.\n"
                   << "Mean element " << i+1 << " is nullptr.\n";

            throw logic_error(buffer.str());
        }

        if(mean_element->GetText())
        {
            descriptives(i).mean = static_cast<type>(atof(mean_element->GetText()));
        }

        // Standard deviation

        const tinyxml2::XMLElement* standard_deviation_element = descriptives_element->FirstChildElement("StandardDeviation");

        if(!standard_deviation_element)
        {
            buffer << "OpenNN Exception: UnscalingLayer class.\n"
                   << "void from_XML(const tinyxml2::XMLElement*) method.\n"
                   << "Standard deviation element " << i+1 << " is nullptr.\n";

            throw logic_error(buffer.str());
        }

        if(standard_deviation_element->GetText())
        {
            descriptives(i).standard_deviation = static_cast<type>(atof(standard_deviation_element->GetText()));
        }

        // Unscaling method

        const tinyxml2::XMLElement* unscaling_method_element = descriptives_element->FirstChildElement("Scaler");

        if(!unscaling_method_element)
        {
            buffer << "OpenNN Exception: UnscalingLayer class.\n"
                   << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
                   << "Unscaling method element " << i+1 << " is nullptr.\n";

            throw logic_error(buffer.str());
        }

        const string new_method = unscaling_method_element->GetText();

        if(new_method == "NoScaling")
        {
            scalers[i] = Scaler::NoScaling;
        }
        else if(new_method == "MinimumMaximum")
        {
            scalers[i] = Scaler::MinimumMaximum;
        }
        else if(new_method == "MeanStandardDeviation")
        {
            scalers[i] = Scaler::MeanStandardDeviation;
        }
        else if(new_method == "StandardDeviation")
        {
            scalers[i] = Scaler::StandardDeviation;
        }
        else if(new_method == "Logarithm")
        {
            scalers[i] = Scaler::Logarithm;
        }
    }

    // Display

    const tinyxml2::XMLElement* element = root_element->FirstChildElement("Display");

    if(element)
    {
        string new_display_string = element->GetText();

        try
        {
            set_display(new_display_string != "0");
        }
        catch(const logic_error& e)
        {
            cerr << e.what() << endl;
        }
    }
}
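
// Usage sketch (illustrative; variable names hypothetical): the layer can be rebuilt
// from an XML string such as the one produced by write_XML above.
//
//     tinyxml2::XMLDocument document;
//
//     if(document.Parse(xml_string.c_str()) == tinyxml2::XML_SUCCESS)
//     {
//         unscaling_layer.from_XML(document);
//     }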


/// Returns a string with the expression of the unscaling process in the C language.

string UnscalingLayer::write_expression_c() const
{
    const Index neurons_number = get_neurons_number();

    ostringstream buffer;

    buffer.precision(10);

    buffer << "vector<float> " << layer_name << "(const vector<float>& inputs)\n{" << endl;

    buffer << "\tvector<float> outputs(" << neurons_number << ");\n" << endl;

    for(Index i = 0; i < neurons_number; i++)
    {
        if(scalers(i) == Scaler::NoScaling)
        {
            buffer << "\toutputs[" << i << "] = inputs[" << i << "];" << endl;
        }
        else if(scalers(i) == Scaler::MinimumMaximum)
        {
            if(abs(descriptives(i).minimum - descriptives(i).maximum) < type(NUMERIC_LIMITS_MIN))
            {
                buffer << "\toutputs[" << i << "] = " << descriptives(i).minimum << ";\n";
            }
            else
            {
                const type slope = (descriptives(i).maximum-descriptives(i).minimum)/(max_range-min_range);

                const type intercept = descriptives(i).minimum - min_range*(descriptives(i).maximum-descriptives(i).minimum)/(max_range-min_range);

                buffer << "\toutputs[" << i << "] = inputs[" << i << "]*" << slope << "+" << intercept << ";\n";
            }
        }
        else if(scalers(i) == Scaler::MeanStandardDeviation)
        {
            const type standard_deviation = descriptives(i).standard_deviation;

            const type mean = descriptives(i).mean;

            buffer << "\toutputs[" << i << "] = inputs[" << i << "]*" << standard_deviation << "+" << mean << ";\n";
        }
        else if(scalers(i) == Scaler::StandardDeviation)
        {
            const type standard_deviation = descriptives(i).standard_deviation;

            buffer << "\toutputs[" << i << "] = inputs[" << i << "]*" << standard_deviation << ";\n";
        }
        else if(scalers(i) == Scaler::Logarithm)
        {
            buffer << "\toutputs[" << i << "] = exp(inputs[" << i << "]);\n";
        }
        else
        {
            ostringstream buffer;

            buffer << "OpenNN Exception: UnscalingLayer class.\n"
                   << "string write_expression_c() const method.\n"
                   << "Unknown unscaling method.\n";

            throw logic_error(buffer.str());
        }
    }

    buffer << "\n\treturn outputs;\n}" << endl;

    return buffer.str();
}
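
// For illustration (hedged example): for a single MeanStandardDeviation neuron with
// mean 50 and standard deviation 10, the generated C-like code reads roughly:
//
//     vector<float> unscaling_layer(const vector<float>& inputs)
//     {
//         vector<float> outputs(1);
//
//         outputs[0] = inputs[0]*10+50;
//
//         return outputs;
//     }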


/// Returns a string with the expression of the unscaling process in the Python language.

string UnscalingLayer::write_expression_python() const
{
    const Index neurons_number = get_neurons_number();

    ostringstream buffer;

    buffer.precision(10);

    buffer << "\tdef " << layer_name << "(self,inputs):\n" << endl;

    buffer << "\t\toutputs = [None] * " << neurons_number << "\n" << endl;

    for(Index i = 0; i < neurons_number; i++)
    {
        if(scalers(i) == Scaler::NoScaling)
        {
            buffer << "\t\toutputs[" << i << "] = inputs[" << i << "]" << endl;
        }
        else if(scalers(i) == Scaler::MinimumMaximum)
        {
            if(abs(descriptives(i).minimum - descriptives(i).maximum) < type(NUMERIC_LIMITS_MIN))
            {
                buffer << "\t\toutputs[" << i << "] = " << descriptives(i).minimum << "\n";
            }
            else
            {
                const type slope = (descriptives(i).maximum-descriptives(i).minimum)/(max_range-min_range);

                const type intercept
                        = descriptives(i).minimum - min_range*(descriptives(i).maximum-descriptives(i).minimum)/(max_range-min_range);

                buffer << "\t\toutputs[" << i << "] = inputs[" << i << "]*" << slope << "+" << intercept << "\n";
            }
        }
        else if(scalers(i) == Scaler::MeanStandardDeviation)
        {
            const type standard_deviation = descriptives(i).standard_deviation;

            const type mean = descriptives(i).mean;

            buffer << "\t\toutputs[" << i << "] = inputs[" << i << "]*" << standard_deviation << "+" << mean << "\n";
        }
        else if(scalers(i) == Scaler::StandardDeviation)
        {
            const type standard_deviation = descriptives(i).standard_deviation;

            buffer << "\t\toutputs[" << i << "] = inputs[" << i << "]*" << standard_deviation << "\n";
        }
        else if(scalers(i) == Scaler::Logarithm)
        {
            buffer << "\t\toutputs[" << i << "] = np.exp(inputs[" << i << "])\n";
        }
        else
        {
            ostringstream buffer;

            buffer << "OpenNN Exception: UnscalingLayer class.\n"
                   << "string write_expression_python() const method.\n"
                   << "Unknown unscaling method.\n";

            throw logic_error(buffer.str());
        }
    }

    buffer << "\n\t\treturn outputs\n" << endl;

    return buffer.str();
}
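
// For illustration (hedged example): for a single MeanStandardDeviation neuron with
// mean 50 and standard deviation 10, the generated Python method reads roughly:
//
//     def unscaling_layer(self,inputs):
//
//         outputs = [None] * 1
//
//         outputs[0] = inputs[0]*10+50
//
//         return outputs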

}


// OpenNN: Open Neural Networks Library.
// Copyright(C) 2005-2021 Artificial Intelligence Techniques, SL.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
// Lesser General Public License for more details.

// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA