9#include "minkowski_error.h"
31 :
LossIndex(new_neural_network_pointer, new_data_set_pointer)
75 if(new_Minkowski_parameter < type(1) || new_Minkowski_parameter > type(2.0))
79 buffer <<
"OpenNN Error. MinkowskiError class.\n"
80 <<
"void set_Minkowski_parameter(const type&) method.\n"
81 <<
"The Minkowski parameter must be comprised between 1 and 2.\n";
83 throw logic_error(buffer.str());
102 Tensor<type, 0> minkowski_error;
104 minkowski_error.device(*thread_pool_device)
107 const Index batch_samples_number = batch.get_samples_number();
109 back_propagation.error = minkowski_error(0)/batch_samples_number;
113void MinkowskiError::calculate_output_delta(
const DataSetBatch& batch,
119 LayerBackPropagation* output_layer_back_propagation = back_propagation.neural_network.layers(trainable_layers_number-1);
121 const Tensor<type, 0> p_norm_derivative =
124 const Index batch_samples_number = batch.get_samples_number();
126 switch(output_layer_back_propagation->layer_pointer->
get_type())
128 case Layer::Type::Perceptron:
133 if(
abs(p_norm_derivative()) < type(NUMERIC_LIMITS_MIN))
135 perceptron_layer_back_propagation->delta.setZero();
139 perceptron_layer_back_propagation->delta.device(*thread_pool_device)
140 = back_propagation.errors*(back_propagation.errors.abs().pow(
minkowski_parameter - type(2)));
142 perceptron_layer_back_propagation->delta.device(*thread_pool_device) =
143 (type(1.0/batch_samples_number))*perceptron_layer_back_propagation->delta/p_norm_derivative();
148 case Layer::Type::Probabilistic:
150 ProbabilisticLayerBackPropagation* probabilistic_layer_back_propagation
151 =
static_cast<ProbabilisticLayerBackPropagation*
>(output_layer_back_propagation);
153 if(
abs(p_norm_derivative()) < type(NUMERIC_LIMITS_MIN))
155 probabilistic_layer_back_propagation->delta.setZero();
160 probabilistic_layer_back_propagation->delta.device(*thread_pool_device)
161 = back_propagation.errors*(back_propagation.errors.abs().pow(
minkowski_parameter - type(2)));
163 probabilistic_layer_back_propagation->delta.device(*thread_pool_device) =
164 (type(1.0/batch_samples_number))*probabilistic_layer_back_propagation->delta/p_norm_derivative();
169 case Layer::Type::Recurrent:
171 RecurrentLayerBackPropagation* recurrent_layer_back_propagation
172 =
static_cast<RecurrentLayerBackPropagation*
>(output_layer_back_propagation);
174 if(
abs(p_norm_derivative()) < type(NUMERIC_LIMITS_MIN))
176 recurrent_layer_back_propagation->delta.setZero();
180 recurrent_layer_back_propagation->delta.device(*thread_pool_device)
181 = back_propagation.errors*(back_propagation.errors.abs().pow(
minkowski_parameter - type(2)));
183 recurrent_layer_back_propagation->delta.device(*thread_pool_device) =
184 (type(1.0/batch_samples_number))*recurrent_layer_back_propagation->delta/p_norm_derivative();
189 case Layer::Type::LongShortTermMemory:
191 LongShortTermMemoryLayerBackPropagation* long_short_term_memory_layer_back_propagation
192 =
static_cast<LongShortTermMemoryLayerBackPropagation*
>(output_layer_back_propagation);
194 if(
abs(p_norm_derivative()) < type(NUMERIC_LIMITS_MIN))
196 long_short_term_memory_layer_back_propagation->delta.setZero();
200 long_short_term_memory_layer_back_propagation->delta.device(*thread_pool_device)
201 = back_propagation.errors*(back_propagation.errors.abs().pow(
minkowski_parameter - type(2)));
203 long_short_term_memory_layer_back_propagation->delta.device(*thread_pool_device) =
204 (type(1.0/batch_samples_number))*long_short_term_memory_layer_back_propagation->delta/p_norm_derivative();
218 return "MINKOWSKI_ERROR";
226 return "Minkowski error";
235 ostringstream buffer;
239 file_stream.OpenElement(
"MinkowskiError");
243 file_stream.OpenElement(
"MinkowskiParameter");
248 file_stream.
PushText(buffer.str().c_str());
267 ostringstream buffer;
269 buffer <<
"OpenNN Exception: MinkowskiError class.\n"
270 <<
"void from_XML(const tinyxml2::XMLDocument&) method.\n"
271 <<
"Minkowski error element is nullptr.\n";
273 throw logic_error(buffer.str());
280 const tinyxml2::XMLElement* parameter_element = root_element->FirstChildElement(
"MinkowskiParameter");
282 type new_Minkowski_parameter = type(1.5);
284 if(parameter_element)
286 new_Minkowski_parameter =
static_cast<type
>(atof(parameter_element->GetText()));
293 catch(
const logic_error& e)
295 cerr << e.what() << endl;
This class represents the concept of data set for data modelling problems, such as approximation,...
This abstract class represents the concept of a loss index composed of an error term and a regularization term.
NeuralNetwork * neural_network_pointer
Pointer to a neural network object.
bool display
Display messages to screen.
type minkowski_parameter
Minkowski exponent value.
void from_XML(const tinyxml2::XMLDocument &)
string get_error_type() const
Returns a string with the name of the Minkowski error loss type, "MINKOWSKI_ERROR".
virtual ~MinkowskiError()
void set_Minkowski_parameter(const type &)
type get_Minkowski_parameter() const
Returns the Minkowski exponent value used to calculate the error.
void write_XML(tinyxml2::XMLPrinter &) const
string get_error_type_text() const
Returns a string with the name of the Minkowski error loss type in text format.
void calculate_error(const DataSetBatch &batch, const NeuralNetworkForwardPropagation &forward_propagation, LossIndexBackPropagation &back_propagation) const
Computes the Minkowski error of a batch (the p-norm of the output errors averaged over the batch size) and stores it in back_propagation.error.
void PushText(const char *text, bool cdata=false)
Add a text node.
virtual void CloseElement(bool compactMode=false)
If streaming, close the Element.
HALF_CONSTEXPR half abs(half arg)