// minkowski_error.cpp
//
// OpenNN: Open Neural Networks Library
// www.opennn.net
//
// M I N K O W S K I   E R R O R   C L A S S
//
// Artificial Intelligence Techniques SL
// artelnics@artelnics.com
#include "minkowski_error.h"
namespace OpenNN
{
13
17
19{
21}
22
23
29
30MinkowskiError::MinkowskiError(NeuralNetwork* new_neural_network_pointer, DataSet* new_data_set_pointer)
31 : LossIndex(new_neural_network_pointer, new_data_set_pointer)
32{
34}
35
36
39
41{
42}
43
44
46
48{
50}
51
52
58
60{
61 minkowski_parameter = type(1.5);
62
63 display = true;
64}
65
66
70
71void MinkowskiError::set_Minkowski_parameter(const type& new_Minkowski_parameter)
72{
73 // Control sentence
74
75 if(new_Minkowski_parameter < type(1) || new_Minkowski_parameter > type(2.0))
76 {
77 ostringstream buffer;
78
79 buffer << "OpenNN Error. MinkowskiError class.\n"
80 << "void set_Minkowski_parameter(const type&) method.\n"
81 << "The Minkowski parameter must be comprised between 1 and 2.\n";
82
83 throw logic_error(buffer.str());
84 }
85
86 // Set Minkowski parameter
87
88 minkowski_parameter = new_Minkowski_parameter;
89}
90
91
97
100 LossIndexBackPropagation& back_propagation) const
101{
102 Tensor<type, 0> minkowski_error;
103
104 minkowski_error.device(*thread_pool_device)
105 = (back_propagation.errors.abs().pow(minkowski_parameter).sum()).pow(static_cast<type>(1.0)/minkowski_parameter);
106
107 const Index batch_samples_number = batch.get_samples_number();
108
109 back_propagation.error = minkowski_error(0)/batch_samples_number;
110}
111
112
113void MinkowskiError::calculate_output_delta(const DataSetBatch& batch,
115 LossIndexBackPropagation& back_propagation) const
116{
117 const Index trainable_layers_number = neural_network_pointer->get_trainable_layers_number();
118
119 LayerBackPropagation* output_layer_back_propagation = back_propagation.neural_network.layers(trainable_layers_number-1);
120
121 const Tensor<type, 0> p_norm_derivative =
122 (back_propagation.errors.abs().pow(minkowski_parameter).sum().pow(static_cast<type>(1.0)/minkowski_parameter)).pow(minkowski_parameter - type(1));
123
124 const Index batch_samples_number = batch.get_samples_number();
125
126 switch(output_layer_back_propagation->layer_pointer->get_type())
127 {
128 case Layer::Type::Perceptron:
129 {
130 PerceptronLayerBackPropagation* perceptron_layer_back_propagation
131 = static_cast<PerceptronLayerBackPropagation*>(output_layer_back_propagation);
132
133 if(abs(p_norm_derivative()) < type(NUMERIC_LIMITS_MIN))
134 {
135 perceptron_layer_back_propagation->delta.setZero();
136 }
137 else
138 {
139 perceptron_layer_back_propagation->delta.device(*thread_pool_device)
140 = back_propagation.errors*(back_propagation.errors.abs().pow(minkowski_parameter - type(2)));
141
142 perceptron_layer_back_propagation->delta.device(*thread_pool_device) =
143 (type(1.0/batch_samples_number))*perceptron_layer_back_propagation->delta/p_norm_derivative();
144 }
145 }
146 break;
147
148 case Layer::Type::Probabilistic:
149 {
150 ProbabilisticLayerBackPropagation* probabilistic_layer_back_propagation
151 = static_cast<ProbabilisticLayerBackPropagation*>(output_layer_back_propagation);
152
153 if(abs(p_norm_derivative()) < type(NUMERIC_LIMITS_MIN))
154 {
155 probabilistic_layer_back_propagation->delta.setZero();
156
157 }
158 else
159 {
160 probabilistic_layer_back_propagation->delta.device(*thread_pool_device)
161 = back_propagation.errors*(back_propagation.errors.abs().pow(minkowski_parameter - type(2)));
162
163 probabilistic_layer_back_propagation->delta.device(*thread_pool_device) =
164 (type(1.0/batch_samples_number))*probabilistic_layer_back_propagation->delta/p_norm_derivative();
165 }
166 }
167 break;
168
169 case Layer::Type::Recurrent:
170 {
171 RecurrentLayerBackPropagation* recurrent_layer_back_propagation
172 = static_cast<RecurrentLayerBackPropagation*>(output_layer_back_propagation);
173
174 if(abs(p_norm_derivative()) < type(NUMERIC_LIMITS_MIN))
175 {
176 recurrent_layer_back_propagation->delta.setZero();
177 }
178 else
179 {
180 recurrent_layer_back_propagation->delta.device(*thread_pool_device)
181 = back_propagation.errors*(back_propagation.errors.abs().pow(minkowski_parameter - type(2)));
182
183 recurrent_layer_back_propagation->delta.device(*thread_pool_device) =
184 (type(1.0/batch_samples_number))*recurrent_layer_back_propagation->delta/p_norm_derivative();
185 }
186 }
187 break;
188
189 case Layer::Type::LongShortTermMemory:
190 {
191 LongShortTermMemoryLayerBackPropagation* long_short_term_memory_layer_back_propagation
192 = static_cast<LongShortTermMemoryLayerBackPropagation*>(output_layer_back_propagation);
193
194 if(abs(p_norm_derivative()) < type(NUMERIC_LIMITS_MIN))
195 {
196 long_short_term_memory_layer_back_propagation->delta.setZero();
197 }
198 else
199 {
200 long_short_term_memory_layer_back_propagation->delta.device(*thread_pool_device)
201 = back_propagation.errors*(back_propagation.errors.abs().pow(minkowski_parameter - type(2)));
202
203 long_short_term_memory_layer_back_propagation->delta.device(*thread_pool_device) =
204 (type(1.0/batch_samples_number))*long_short_term_memory_layer_back_propagation->delta/p_norm_derivative();
205 }
206 }
207 break;
208
209 default: break;
210 }
211}
212
213
215
217{
218 return "MINKOWSKI_ERROR";
219}
220
221
223
225{
226 return "Minkowski error";
227}
228
229
232
234{
235 ostringstream buffer;
236
237 // Error type
238
239 file_stream.OpenElement("MinkowskiError");
240
241 // Minkowski parameter
242
243 file_stream.OpenElement("MinkowskiParameter");
244
245 buffer.str("");
246 buffer << minkowski_parameter;
247
248 file_stream.PushText(buffer.str().c_str());
249
250 file_stream.CloseElement();
251
252 // Close error
253
254 file_stream.CloseElement();
255}
256
257
260
262{
263 const tinyxml2::XMLElement* root_element = document.FirstChildElement("MinkowskiError");
264
265 if(!root_element)
266 {
267 ostringstream buffer;
268
269 buffer << "OpenNN Exception: MinkowskiError class.\n"
270 << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
271 << "Minkowski error element is nullptr.\n";
272
273 throw logic_error(buffer.str());
274 }
275
276 // Minkowski parameter
277
278 if(root_element)
279 {
280 const tinyxml2::XMLElement* parameter_element = root_element->FirstChildElement("MinkowskiParameter");
281
282 type new_Minkowski_parameter = type(1.5);
283
284 if(parameter_element)
285 {
286 new_Minkowski_parameter = static_cast<type>(atof(parameter_element->GetText()));
287 }
288
289 try
290 {
291 set_Minkowski_parameter(new_Minkowski_parameter);
292 }
293 catch(const logic_error& e)
294 {
295 cerr << e.what() << endl;
296 }
297 }
298}
}


// OpenNN: Open Neural Networks Library.
// Copyright(C) 2005-2021 Artificial Intelligence Techniques, SL.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
/* NOTE(review): the text below is Doxygen cross-reference residue from a
   documentation extraction, not part of the original source file. It is kept
   verbatim inside this comment so no content is lost:

This class represents the concept of data set for data modelling problems, such as approximation,...
Definition: data_set.h:57
Type get_type() const
Definition: layer.cpp:25
This abstract class represents the concept of loss index composed of an error term and a regularizati...
Definition: loss_index.h:48
NeuralNetwork * neural_network_pointer
Pointer to a neural network object.
Definition: loss_index.h:254
bool display
Display messages to screen.
Definition: loss_index.h:270
type minkowski_parameter
Minkowski exponent value.
void from_XML(const tinyxml2::XMLDocument &)
string get_error_type() const
Returns a string with the name of the Minkowski error loss type, "MINKOWSKI_ERROR".
void set_Minkowski_parameter(const type &)
type get_Minkowski_parameter() const
Returns the Minkowski exponent value used to calculate the error.
void write_XML(tinyxml2::XMLPrinter &) const
string get_error_type_text() const
Returns a string with the name of the Minkowski error loss type in text format.
void calculate_error(const DataSetBatch &batch, const NeuralNetworkForwardPropagation &forward_propagation, LossIndexBackPropagation &back_propagation) const
MinkowskiError::calculate_error.
void PushText(const char *text, bool cdata=false)
Add a text node.
Definition: tinyxml2.cpp:2878
virtual void CloseElement(bool compactMode=false)
If streaming, close the Element.
Definition: tinyxml2.cpp:2834
HALF_CONSTEXPR half abs(half arg)
Definition: half.hpp:2735
half pow(half x, half y)
Definition: half.hpp:3427
*/