9#include "scaling_layer.h"
30 set(new_neurons_number);
36 set(new_inputs_dimensions);
47 set(new_descriptives);
58Tensor<Index, 1> ScalingLayer::get_outputs_dimensions()
const
60 return input_variables_dimensions;
70Index ScalingLayer::get_neurons_number()
const
98 const Index neurons_number = get_neurons_number();
100 Tensor<type, 1> minimums(neurons_number);
102 for(Index i = 0; i < neurons_number; i++)
115 const Index neurons_number = get_neurons_number();
117 Tensor<type, 1> maximums(neurons_number);
119 for(Index i = 0; i < neurons_number; i++)
132 const Index neurons_number = get_neurons_number();
134 Tensor<type, 1> means(neurons_number);
136 for(Index i = 0; i < neurons_number; i++)
149 const Index neurons_number = get_neurons_number();
151 Tensor<type, 1> standard_deviations(neurons_number);
153 for(Index i = 0; i < neurons_number; i++)
155 standard_deviations[i] =
descriptives[i].standard_deviation;
158 return standard_deviations;
174 const Index neurons_number = get_neurons_number();
176 Tensor<string, 1> scaling_methods_strings(neurons_number);
178 for(Index i = 0; i < neurons_number; i++)
180 if(
scalers[i] == Scaler::NoScaling)
182 scaling_methods_strings[i] =
"NoScaling";
184 else if(
scalers[i] == Scaler::MinimumMaximum)
186 scaling_methods_strings[i] =
"MinimumMaximum";
188 else if(
scalers[i] == Scaler::MeanStandardDeviation)
190 scaling_methods_strings[i] =
"MeanStandardDeviation";
192 else if(
scalers[i] == Scaler::StandardDeviation)
194 scaling_methods_strings[i] =
"StandardDeviation";
196 else if(
scalers[i] == Scaler::Logarithm)
198 scaling_methods_strings[i] =
"Logarithm";
202 ostringstream buffer;
204 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
205 <<
"Tensor<string, 1> write_scalers() const method.\n"
206 <<
"Unknown " << i <<
" scaling method.\n";
208 throw logic_error(buffer.str());
212 return scaling_methods_strings;
221 const Index neurons_number = get_neurons_number();
225 if(neurons_number == 0)
227 ostringstream buffer;
229 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
230 <<
"Tensor<string, 1> write_scalers() const method.\n"
231 <<
"Neurons number must be greater than 0.\n";
233 throw logic_error(buffer.str());
238 Tensor<string, 1> scaling_methods_strings(neurons_number);
240 for(Index i = 0; i < neurons_number; i++)
242 if(
scalers[i] == Scaler::NoScaling)
244 scaling_methods_strings[i] =
"no scaling";
246 else if(
scalers[i] == Scaler::MeanStandardDeviation)
248 scaling_methods_strings[i] =
"mean and standard deviation";
250 else if(
scalers[i] == Scaler::StandardDeviation)
252 scaling_methods_strings[i] =
"standard deviation";
254 else if(
scalers[i] == Scaler::MinimumMaximum)
256 scaling_methods_strings[i] =
"minimum and maximum";
258 else if(
scalers[i] == Scaler::Logarithm)
260 scaling_methods_strings[i] =
"Logarithm";
264 ostringstream buffer;
266 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
267 <<
"Tensor<string, 1> write_scalers_text() const method.\n"
268 <<
"Unknown " << i <<
" scaling method.\n";
270 throw logic_error(buffer.str());
274 return scaling_methods_strings;
307 scalers.resize(new_inputs_number);
309 scalers.setConstant(Scaler::MeanStandardDeviation);
317 const Tensor<Index,0> dimension_product = new_inputs_dimensions.prod();
321 scalers.resize(dimension_product(0));
322 scalers.setConstant(Scaler::MeanStandardDeviation);
324 input_variables_dimensions.resize(new_inputs_dimensions.size());
326 input_variables_dimensions = new_inputs_dimensions;
341 scalers.resize(new_descriptives.size());
343 scalers.setConstant(Scaler::MeanStandardDeviation);
349void ScalingLayer::set(
const Tensor<Descriptives, 1>& new_descriptives,
const Tensor<Scaler, 1>& new_scalers)
364 from_XML(new_scaling_layer_document);
368void ScalingLayer::set_inputs_number(
const Index& new_inputs_number)
372 scalers.resize(new_inputs_number);
374 scalers.setConstant(Scaler::MeanStandardDeviation);
378void ScalingLayer::set_neurons_number(
const Index& new_neurons_number)
382 scalers.resize(new_neurons_number);
384 scalers.setConstant(Scaler::MeanStandardDeviation);
430 const Index new_descriptives_size = new_descriptives.size();
432 const Index neurons_number = get_neurons_number();
434 if(new_descriptives_size != neurons_number)
436 ostringstream buffer;
438 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
439 <<
"void set_descriptives(const Tensor<Descriptives, 1>&) method.\n"
440 <<
"Size of descriptives (" << new_descriptives_size <<
") is not equal to number of scaling neurons (" << neurons_number <<
").\n";
442 throw logic_error(buffer.str());
497 descriptives(i).set_standard_deviation(new_standard_deviation);
508 const Index neurons_number = get_neurons_number();
510 if(neurons_number == 0)
512 ostringstream buffer;
514 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
515 <<
"void set_scalers(const Tensor<Scaler, 1>&) method.\n"
516 <<
"Neurons number (" << neurons_number <<
") must be greater than 0.\n";
518 throw logic_error(buffer.str());
533 const Index neurons_number = get_neurons_number();
537 if(neurons_number == 0)
539 ostringstream buffer;
541 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
542 <<
"void set_scalers(const Tensor<string, 1>&) method.\n"
543 <<
"Neurons number (" << neurons_number <<
") must be greater than 0.\n";
545 throw logic_error(buffer.str());
550 Tensor<Scaler, 1> new_scaling_methods(neurons_number);
552 for(Index i = 0; i < neurons_number; i++)
554 if(new_scaling_methods_string(i) ==
"NoScaling")
556 new_scaling_methods(i) = Scaler::NoScaling;
558 else if(new_scaling_methods_string(i) ==
"MinimumMaximum")
560 new_scaling_methods(i) = Scaler::MinimumMaximum;
562 else if(new_scaling_methods_string(i) ==
"MeanStandardDeviation")
564 new_scaling_methods(i) = Scaler::MeanStandardDeviation;
566 else if(new_scaling_methods_string(i) ==
"StandardDeviation")
568 new_scaling_methods(i) = Scaler::StandardDeviation;
570 else if(new_scaling_methods_string(i) ==
"Logarithm")
572 new_scaling_methods(i) = Scaler::Logarithm;
576 ostringstream buffer;
578 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
579 <<
"void set_scalers(const Tensor<string, 1>&) method.\n"
580 <<
"Unknown scaling method: " << new_scaling_methods_string[i] <<
".\n";
582 throw logic_error(buffer.str());
596 const Index neurons_number = get_neurons_number();
600 if(neurons_number == 0)
602 ostringstream buffer;
604 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
605 <<
"void set_scalers(const Tensor<string, 1>&) method.\n"
606 <<
"Neurons number (" << neurons_number <<
")must be greater than 0.\n";
608 throw logic_error(buffer.str());
613 Tensor<Scaler, 1> new_scaling_methods(neurons_number);
615 for(Index i = 0; i < neurons_number; i++)
617 if(new_scaling_methods_string ==
"NoScaling")
619 new_scaling_methods(i) = Scaler::NoScaling;
621 else if(new_scaling_methods_string ==
"MeanStandardDeviation")
623 new_scaling_methods(i) = Scaler::MeanStandardDeviation;
625 else if(new_scaling_methods_string ==
"MinimumMaximum")
627 new_scaling_methods(i) = Scaler::MinimumMaximum;
629 else if(new_scaling_methods_string ==
"StandardDeviation")
631 new_scaling_methods(i) = Scaler::StandardDeviation;
633 else if(new_scaling_methods_string ==
"Logarithm")
635 new_scaling_methods(i) = Scaler::Logarithm;
639 ostringstream buffer;
641 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
642 <<
"void set_scalers(const Tensor<string, 1>&) method.\n"
643 <<
"Unknown scaling method: " << new_scaling_methods_string[i] <<
".\n";
645 throw logic_error(buffer.str());
658 const Index neurons_number = get_neurons_number();
660 for(Index i = 0; i < neurons_number; i++)
662 scalers(i) = new_scaling_method;
682 const Index inputs_number = get_neurons_number();
684 if(inputs_number == 0)
703 const Index inputs_number = get_neurons_number();
707 const Index size = inputs.size();
709 if(size != inputs_number)
711 ostringstream buffer;
713 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
714 <<
"void check_range(const Tensor<type, 1>&) const method.\n"
715 <<
"Size of inputs must be equal to number of inputs.\n";
717 throw logic_error(buffer.str());
726 for(Index i = 0; i < inputs_number; i++)
730 cout <<
"OpenNN Warning: ScalingLayer class.\n"
731 <<
"void check_range(const Tensor<type, 1>&) const method.\n"
732 <<
"Input value " << i <<
" is less than corresponding minimum.\n";
737 cout <<
"OpenNN Warning: ScalingLayer class.\n"
738 <<
"void check_range(const Tensor<type, 1>&) const method.\n"
739 <<
"Input value " << i <<
" is greater than corresponding maximum.\n";
751 Tensor<type, 2> outputs;
753 const Index neurons_number = get_neurons_number();
757 ostringstream buffer;
759 const Index columns_number = inputs.dimension(1);
761 if(columns_number != neurons_number)
763 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
764 <<
"Tensor<type, 2> calculate_outputs(const Tensor<type, 2>&) const method.\n"
765 <<
"Size of inputs (" << columns_number <<
") must be equal to number of scaling neurons (" << neurons_number <<
").\n";
767 throw logic_error(buffer.str());
772 const Index points_number = inputs.dimension(0);
774 outputs.resize(points_number, neurons_number);
776 for(Index i = 0; i < points_number; i++)
778 for(Index j = 0; j < neurons_number; j++)
784 cout <<
"OpenNN Warning: ScalingLayer class.\n"
785 <<
"Tensor<type, 2> calculate_outputs(const Tensor<type, 2>&) const method.\n"
786 <<
"Standard deviation of variable " << i <<
" is zero.\n"
787 <<
"Those variables won't be scaled.\n";
790 outputs(j) = inputs(j);
794 if(
scalers(j) == Scaler::NoScaling)
796 outputs(i,j) = inputs(i,j);
798 else if(
scalers(j) == Scaler::MinimumMaximum)
803 const type intercept =
806 outputs(i,j) = inputs(i,j)*slope + intercept;
808 else if(
scalers(j) == Scaler::MeanStandardDeviation)
810 const type slope =
static_cast<type
>(1)/
descriptives(j).standard_deviation;
814 outputs(i,j) = inputs(i,j)*slope + intercept;
817 else if(
scalers(j) == Scaler::StandardDeviation)
819 outputs(i,j) = inputs(i,j)/
descriptives(j).standard_deviation;
821 else if(
scalers(j) == Scaler::Logarithm)
823 outputs(i,j) =
log(inputs(i,j));
827 ostringstream buffer;
829 buffer <<
"OpenNN Exception: ScalingLayer class\n"
830 <<
"Tensor<type, 2> calculate_outputs(const Tensor<type, 2>&) const method.\n"
831 <<
"Unknown scaling method.\n";
833 throw logic_error(buffer.str());
845 Tensor<type, 4> outputs;
847 const Index neurons_number = get_neurons_number();
851 ostringstream buffer;
853 const Index columns_number = inputs.dimension(1) * inputs.dimension(2) * inputs.dimension(3);
855 if(columns_number != neurons_number)
857 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
858 <<
"Tensor<type, 2> calculate_outputs(const Tensor<type, 2>&) const method.\n"
859 <<
"Size of inputs (" << columns_number <<
") must be equal to number of scaling neurons (" << neurons_number <<
").\n";
861 throw logic_error(buffer.str());
866 const Index points_number = inputs.dimension(0);
868 for(Index i = 0; i < points_number; i++)
870 for(Index j = 0; j < neurons_number; j++)
872 const Index channel_index = j%inputs.dimension(1);
873 const Index row_index = (j/(inputs.dimension(1)))%inputs.dimension(2);
874 const Index column_index = (j/(inputs.dimension(1) * inputs.dimension(2)))%inputs.dimension(3);
880 cout <<
"OpenNN Warning: ScalingLayer class.\n"
881 <<
"Tensor<type, 2> calculate_mean_standard_deviation_outputs(const Tensor<type, 2>&) const method.\n"
882 <<
"Standard deviation of variable " << i <<
" is zero.\n"
883 <<
"Those variables won't be scaled.\n";
886 outputs(j) = inputs(j);
890 if(
scalers(j) == Scaler::NoScaling)
892 outputs(i, channel_index, row_index, column_index) = inputs(i, channel_index, row_index, column_index);
894 else if(
scalers(j) == Scaler::MinimumMaximum)
896 outputs(i, channel_index, row_index, column_index) =
static_cast<type
>(2)*(inputs(i, channel_index, row_index, column_index) -
descriptives(j).minimum)/(
descriptives(j).maximum-
descriptives(j).minimum) -
static_cast<type
>(1);
898 else if(
scalers(j) == Scaler::MeanStandardDeviation)
900 outputs(i, channel_index, row_index, column_index) = (inputs(i, channel_index, row_index, column_index) -
descriptives(j).mean)/
descriptives(j).standard_deviation;
902 else if(
scalers(j) == Scaler::StandardDeviation)
904 outputs(i, channel_index, row_index, column_index) = inputs(i, channel_index, row_index, column_index)/
descriptives(j).standard_deviation;
906 else if(
scalers(j) == Scaler::Logarithm)
908 outputs(i, channel_index, row_index, column_index) =
log(inputs(i, channel_index, row_index, column_index));
912 ostringstream buffer;
914 buffer <<
"OpenNN Exception: ScalingLayer class\n"
915 <<
"Tensor<type, 2> calculate_outputs(const Tensor<type, 2>&) const method.\n"
916 <<
"Unknown scaling method.\n";
918 throw logic_error(buffer.str());
934 const Index inputs_number = get_neurons_number();
936 ostringstream buffer;
938 buffer.precision(10);
940 for(Index i = 0; i < inputs_number; i++)
942 buffer << outputs_names(i) <<
" = " << inputs_names(i) <<
";\n";
955 const Index inputs_number = get_neurons_number();
957 ostringstream buffer;
959 buffer.precision(10);
961 for(Index i = 0; i < inputs_number; i++)
963 buffer << outputs_names(i) <<
" = 2*(" << inputs_names(i) <<
"-(" <<
descriptives(i).minimum <<
"))/(" <<
descriptives(i).maximum <<
"-(" <<
descriptives(i).minimum <<
"))-1;\n";
976 const Index inputs_number = get_neurons_number();
978 ostringstream buffer;
980 buffer.precision(10);
982 for(Index i = 0; i < inputs_number; i++)
984 buffer << outputs_names(i) <<
" = (" << inputs_names(i) <<
"-(" <<
descriptives(i).mean <<
"))/" <<
descriptives(i).standard_deviation <<
";\n";
997 const Index inputs_number = get_neurons_number();
999 ostringstream buffer;
1001 buffer.precision(10);
1003 for(Index i = 0; i < inputs_number; i++)
1005 buffer << outputs_names(i) <<
" = " << inputs_names(i) <<
"/(" <<
descriptives(i).standard_deviation <<
");\n";
1008 return buffer.str();
1016 const Index neurons_number = get_neurons_number();
1018 ostringstream buffer;
1020 buffer.precision(10);
1022 for(Index i = 0; i < neurons_number; i++)
1024 if(
scalers(i) == Scaler::NoScaling)
1026 buffer <<
"scaled_" << inputs_names(i) <<
" = " << inputs_names(i) <<
";\n";
1028 else if(
scalers(i) == Scaler::MinimumMaximum)
1032 else if(
scalers(i) == Scaler::MeanStandardDeviation)
1034 buffer <<
"scaled_" << inputs_names(i) <<
" = (" << inputs_names(i) <<
"-" <<
descriptives(i).mean <<
")/" <<
descriptives(i).standard_deviation <<
";\n";
1036 else if(
scalers(i) == Scaler::StandardDeviation)
1038 buffer <<
"scaled_" << inputs_names(i) <<
" = " << inputs_names(i) <<
"/(" <<
descriptives(i).standard_deviation <<
");\n";
1042 ostringstream buffer;
1044 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
1045 <<
"string write_expression() const method.\n"
1046 <<
"Unknown inputs scaling method.\n";
1048 throw logic_error(buffer.str());
1052 string expression = buffer.str();
1054 replace(expression,
"+-",
"-");
1055 replace(expression,
"--",
"+");
1067 const Index neurons_number = get_neurons_number();
1069 ostringstream buffer;
1071 buffer.precision(10);
1073 buffer <<
"vector<float> " <<
layer_name <<
"(const vector<float>& inputs)\n{" << endl;
1075 buffer <<
"\tvector<float> outputs(" << neurons_number <<
");\n" << endl;
1077 for(Index i = 0; i < neurons_number; i++)
1079 if(
scalers(i) == Scaler::NoScaling)
1081 buffer <<
"\toutputs[" << i <<
"] = inputs[" << i <<
"];" << endl;
1083 else if(
scalers(i) == Scaler::MinimumMaximum)
1089 buffer <<
"\toutputs[" << i <<
"] = inputs[" << i <<
"]*"<<slope<<
"+"<<intercept<<
";\n";
1091 else if(
scalers(i) == Scaler::MeanStandardDeviation)
1093 const type standard_deviation =
descriptives(i).standard_deviation;
1097 buffer <<
"\toutputs[" << i <<
"] = (inputs[" << i <<
"]-"<<mean<<
")/"<<standard_deviation<<
";\n";
1099 else if(
scalers(i) == Scaler::StandardDeviation)
1101 const type standard_deviation =
descriptives(i).standard_deviation;
1103 buffer <<
"\toutputs[" << i <<
"] = inputs[" << i <<
"]/" << standard_deviation <<
" ;" << endl;
1105 else if(
scalers(i) == Scaler::Logarithm)
1107 buffer <<
"\toutputs[" << i <<
"] = log(inputs[" << i <<
"])"<<
" ;" << endl;
1111 ostringstream buffer;
1113 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
1114 <<
"string write_expression() const method.\n"
1115 <<
"Unknown inputs scaling method.\n";
1117 throw logic_error(buffer.str());
1121 buffer <<
"\n\treturn outputs;\n}" << endl;
1123 return buffer.str();
1127string ScalingLayer::write_expression_python()
const
1129 const Index neurons_number = get_neurons_number();
1131 ostringstream buffer;
1133 buffer.precision(10);
1135 buffer <<
"\tdef " <<
layer_name <<
"(self,inputs):\n" << endl;
1137 buffer <<
"\t\toutputs = [None] * "<<neurons_number<<
"\n" << endl;
1139 for(Index i = 0; i < neurons_number; i++)
1141 if(
scalers(i) == Scaler::NoScaling)
1143 buffer <<
"\t\toutputs[" << i <<
"] = inputs[" << i <<
"]\n" << endl;
1145 else if(
scalers(i) == Scaler::MinimumMaximum)
1151 buffer <<
"\t\toutputs[" << i <<
"] = inputs[" << i <<
"]*"<<slope<<
"+"<<intercept<<
"\n";
1153 else if(
scalers(i) == Scaler::MeanStandardDeviation)
1155 const type standard_deviation =
descriptives(i).standard_deviation;
1159 buffer <<
"\t\toutputs[" << i <<
"] = (inputs[" << i <<
"]-"<<mean<<
")/"<<standard_deviation<<
"\n";
1161 else if(
scalers(i) == Scaler::StandardDeviation)
1163 buffer <<
"\t\toutputs[" << i <<
"] = inputs[" << i <<
"]/" <<
descriptives(i).standard_deviation <<
"\n " << endl;
1165 else if(
scalers(i) == Scaler::Logarithm)
1167 buffer <<
"\t\toutputs[" << i <<
"] = np.log(inputs[" << i <<
"])\n"<< endl;
1171 ostringstream buffer;
1173 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
1174 <<
"string write_expression() const method.\n"
1175 <<
"Unknown inputs scaling method.\n";
1177 throw logic_error(buffer.str());
1181 buffer <<
"\n\t\treturn outputs;\n" << endl;
1183 return buffer.str();
1192 ostringstream buffer;
1194 const Index neurons_number = get_neurons_number();
1198 file_stream.OpenElement(
"ScalingLayer");
1202 file_stream.OpenElement(
"ScalingNeuronsNumber");
1205 buffer << neurons_number;
1207 file_stream.
PushText(buffer.str().c_str());
1211 const Tensor<string, 1> scaling_methods_string =
write_scalers();
1215 for(Index i = 0; i < neurons_number; i++)
1219 file_stream.OpenElement(
"ScalingNeuron");
1225 file_stream.OpenElement(
"Minimum");
1230 file_stream.
PushText(buffer.str().c_str());
1236 file_stream.OpenElement(
"Maximum");
1241 file_stream.
PushText(buffer.str().c_str());
1247 file_stream.OpenElement(
"Mean");
1252 file_stream.
PushText(buffer.str().c_str());
1258 file_stream.OpenElement(
"StandardDeviation");
1263 file_stream.
PushText(buffer.str().c_str());
1269 file_stream.OpenElement(
"Scaler");
1272 buffer << scaling_methods_string(i);
1274 file_stream.
PushText(buffer.str().c_str());
1294 ostringstream buffer;
1296 const tinyxml2::XMLElement* scaling_layer_element = document.FirstChildElement(
"ScalingLayer");
1298 if(!scaling_layer_element)
1300 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
1301 <<
"void from_XML(const tinyxml2::XMLDocument&) method.\n"
1302 <<
"Scaling layer element is nullptr.\n";
1304 throw logic_error(buffer.str());
1309 const tinyxml2::XMLElement* neurons_number_element = scaling_layer_element->FirstChildElement(
"ScalingNeuronsNumber");
1311 if(!neurons_number_element)
1313 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
1314 <<
"void from_XML(const tinyxml2::XMLDocument&) method.\n"
1315 <<
"Scaling neurons number element is nullptr.\n";
1317 throw logic_error(buffer.str());
1320 const Index neurons_number =
static_cast<Index
>(atoi(neurons_number_element->GetText()));
1322 set(neurons_number);
1328 for(Index i = 0; i < neurons_number; i++)
1331 start_element = scaling_neuron_element;
1333 if(!scaling_neuron_element)
1335 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
1336 <<
"void from_XML(const tinyxml2::XMLDocument&) method.\n"
1337 <<
"Scaling neuron " << i+1 <<
" is nullptr.\n";
1339 throw logic_error(buffer.str());
1346 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
1347 <<
"void from_XML(const tinyxml2::XMLDocument&) method.\n"
1348 <<
"Index " << index <<
" is not correct.\n";
1350 throw logic_error(buffer.str());
1355 const tinyxml2::XMLElement* minimum_element = scaling_neuron_element->FirstChildElement(
"Minimum");
1357 if(!minimum_element)
1359 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
1360 <<
"void from_XML(const tinyxml2::XMLDocument&) method.\n"
1361 <<
"Minimum element " << i+1 <<
" is nullptr.\n";
1363 throw logic_error(buffer.str());
1366 if(minimum_element->GetText())
1368 descriptives[i].minimum =
static_cast<type
>(atof(minimum_element->GetText()));
1373 const tinyxml2::XMLElement* maximum_element = scaling_neuron_element->FirstChildElement(
"Maximum");
1375 if(!maximum_element)
1377 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
1378 <<
"void from_XML(const tinyxml2::XMLDocument&) method.\n"
1379 <<
"Maximum element " << i+1 <<
" is nullptr.\n";
1381 throw logic_error(buffer.str());
1384 if(maximum_element->GetText())
1386 descriptives[i].maximum =
static_cast<type
>(atof(maximum_element->GetText()));
1395 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
1396 <<
"void from_XML(const tinyxml2::XMLDocument&) method.\n"
1397 <<
"Mean element " << i+1 <<
" is nullptr.\n";
1399 throw logic_error(buffer.str());
1402 if(mean_element->GetText())
1404 descriptives[i].mean =
static_cast<type
>(atof(mean_element->GetText()));
1409 const tinyxml2::XMLElement* standard_deviation_element = scaling_neuron_element->FirstChildElement(
"StandardDeviation");
1411 if(!standard_deviation_element)
1413 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
1414 <<
"void from_XML(const tinyxml2::XMLDocument&) method.\n"
1415 <<
"Standard deviation element " << i+1 <<
" is nullptr.\n";
1417 throw logic_error(buffer.str());
1420 if(standard_deviation_element->GetText())
1422 descriptives[i].standard_deviation =
static_cast<type
>(atof(standard_deviation_element->GetText()));
1427 const tinyxml2::XMLElement* scaling_method_element = scaling_neuron_element->FirstChildElement(
"Scaler");
1429 if(!scaling_method_element)
1431 buffer <<
"OpenNN Exception: ScalingLayer class.\n"
1432 <<
"void from_XML(const tinyxml2::XMLDocument&) method.\n"
1433 <<
"Scaling method element " << i+1 <<
" is nullptr.\n";
1435 throw logic_error(buffer.str());
1438 string new_method = scaling_method_element->GetText();
1440 if(new_method ==
"NoScaling" || new_method ==
"No Scaling")
1442 scalers[i] = Scaler::NoScaling;
1444 else if(new_method ==
"MinimumMaximum" || new_method ==
"Minimum - Maximum")
1446 scalers[i] = Scaler::MinimumMaximum;
1448 else if(new_method ==
"MeanStandardDeviation" || new_method ==
"Mean - Standard deviation")
1450 scalers[i] = Scaler::MeanStandardDeviation;
1452 else if(new_method ==
"StandardDeviation")
1454 scalers[i] = Scaler::StandardDeviation;
1456 else if(new_method ==
"Logarithm")
1458 scalers[i] = Scaler::Logarithm;
1462 scalers[i] = Scaler::NoScaling;
1468 const tinyxml2::XMLElement* display_element = scaling_layer_element->FirstChildElement(
"Display");
1472 string new_display_string = display_element->GetText();
1478 catch(
const logic_error& e)
1480 cerr << e.what() << endl;
This abstract class represents the concept of a layer of neurons in OpenNN.
string layer_name
Layer name.
Type layer_type
Layer type.
string write_expression_c() const
write_expression_c
void set_maximum(const Index &, const type &)
void set_descriptives(const Tensor< Descriptives, 1 > &)
void set_item_descriptives(const Index &, const Descriptives &)
Tensor< type, 1 > get_means() const
Returns a vector with the means of all scaling neurons.
void set_minimum(const Index &, const type &)
string write_mean_standard_deviation_expression(const Tensor< string, 1 > &, const Tensor< string, 1 > &) const
Tensor< string, 1 > write_scalers() const
Returns a vector of strings with the name of the method used for each scaling neuron.
const bool & get_display() const
void set_standard_deviation(const Index &, const type &)
Index get_inputs_number() const
Returns the number of inputs.
string write_standard_deviation_expression(const Tensor< string, 1 > &, const Tensor< string, 1 > &) const
string write_expression(const Tensor< string, 1 > &, const Tensor< string, 1 > &) const
Returns a string with the expression of the inputs scaling process.
virtual void from_XML(const tinyxml2::XMLDocument &)
Tensor< Scaler, 1 > scalers
Vector of scaling methods for each variable.
void check_range(const Tensor< type, 1 > &) const
bool display
Display warning messages to screen.
bool is_empty() const
Returns true if the number of scaling neurons is zero, and false otherwise.
virtual ~ScalingLayer()
Destructor.
void set()
Sets the scaling layer to be empty.
Tensor< type, 1 > get_minimums() const
Returns a vector with the minimums of all scaling neurons.
string write_minimum_maximum_expression(const Tensor< string, 1 > &, const Tensor< string, 1 > &) const
Tensor< type, 1 > get_maximums() const
Returns a vector with the maximums of all scaling neurons.
string write_no_scaling_expression(const Tensor< string, 1 > &, const Tensor< string, 1 > &) const
void set_scalers(const Tensor< Scaler, 1 > &)
Tensor< type, 2 > calculate_outputs(const Tensor< type, 2 > &)
Tensor< Descriptives, 1 > get_descriptives() const
type min_range
Minimum and maximum of the range used for min-max scaling.
const Tensor< Scaler, 1 > get_scaling_methods() const
Returns the methods used for scaling.
Tensor< Descriptives, 1 > descriptives
Descriptives of input variables.
Tensor< string, 1 > write_scalers_text() const
void set_display(const bool &)
Tensor< type, 1 > get_standard_deviations() const
Returns a vector with the standard deviations of all scaling neurons.
void write_XML(tinyxml2::XMLPrinter &) const
void set_mean(const Index &, const type &)
void set_min_max_range(const type &min, const type &max)
XMLError QueryUnsignedAttribute(const char *name, unsigned int *value) const
See QueryIntAttribute()
const XMLElement * NextSiblingElement(const char *name=nullptr) const
Get the next(right) sibling element of this node, with an optionally supplied name.
void PushText(const char *text, bool cdata=false)
Add a text node.
void PushAttribute(const char *name, const char *value)
If streaming, add an attribute to an open element.
virtual void CloseElement(bool compactMode=false)
If streaming, close the Element.
HALF_CONSTEXPR half abs(half arg)
This structure contains the simplest Descriptives for a set, variable, etc. It includes :