#include "cross_entropy_error.h"


/// Neural network and data set constructor.
/// It creates a cross-entropy error term associated with a neural network and a data set.

CrossEntropyError::CrossEntropyError(NeuralNetwork* new_neural_network_pointer, DataSet* new_data_set_pointer)
    : LossIndex(new_neural_network_pointer, new_data_set_pointer)
{
}
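// A minimal construction sketch (hedged; `neural_network` and `data_set` are
// hypothetical objects, not defined in this file):
//
//     NeuralNetwork neural_network;
//     DataSet data_set;
//
//     CrossEntropyError cross_entropy_error(&neural_network, &data_set);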
/// Calculates the cross-entropy error on a batch.
/// The last trainable layer must be probabilistic; otherwise an exception is thrown.

void CrossEntropyError::calculate_error(const DataSetBatch& batch,
                                        const NeuralNetworkForwardPropagation& forward_propagation,
                                        LossIndexBackPropagation& back_propagation) const
{
    Layer* last_trainable_layer_pointer = forward_propagation.neural_network_pointer->get_last_trainable_layer_pointer();

    if(last_trainable_layer_pointer->get_type() != Layer::Probabilistic)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: CrossEntropyError class.\n"
               << "calculate_error() method.\n"
               << "Last trainable layer is not probabilistic: " << last_trainable_layer_pointer->get_type_string() << endl;

        throw logic_error(buffer.str());
    }

    const Index outputs_number = neural_network_pointer->get_outputs_number();

    // A single output neuron is treated as binary classification; otherwise multi-class.

    if(outputs_number == 1)
    {
        calculate_binary_error(batch, forward_propagation, back_propagation);
    }
    else
    {
        calculate_multiple_error(batch, forward_propagation, back_propagation);
    }
}
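// For reference, both branches average the same loss over the batch:
//   E = -(1/N) * sum over samples and outputs of t*ln(o)  (binary adds a (1-t)*ln(1-o) term),
// with N the batch size, t the targets and o the output activations.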
/// Calculates the binary cross-entropy error:
/// E = -(1/N) * sum(t*ln(o) + (1-t)*ln(1-o)).

void CrossEntropyError::calculate_binary_error(const DataSetBatch& batch,
                                               const NeuralNetworkForwardPropagation& forward_propagation,
                                               LossIndexBackPropagation& back_propagation) const
{
    const Index batch_samples_number = batch.inputs_2d.dimension(0);

    const Index trainable_layers_number = neural_network_pointer->get_trainable_layers_number();

    const Tensor<type, 2>& outputs =
        static_cast<ProbabilisticLayerForwardPropagation*>(forward_propagation.layers(trainable_layers_number-1))->activations;

    const Tensor<type, 2>& targets = batch.targets_2d;

    Tensor<type, 0> cross_entropy_error;

    cross_entropy_error.device(*thread_pool_device)
        = -(targets*(outputs.log())).sum() - ((type(1)-targets)*((type(1)-outputs).log())).sum();

    back_propagation.error = cross_entropy_error()/static_cast<type>(batch_samples_number);
}
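// Worked example of the binary formula (a sketch, not taken from the library's tests):
// for targets t = {1, 0} and outputs o = {0.9, 0.2},
//   E = -(1/2)*(ln(0.9) + ln(0.8)) ≈ -(1/2)*(-0.105 - 0.223) ≈ 0.164.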
/// Calculates the multi-class cross-entropy error:
/// E = -(1/N) * sum(t*ln(o)).

void CrossEntropyError::calculate_multiple_error(const DataSetBatch& batch,
                                                 const NeuralNetworkForwardPropagation& forward_propagation,
                                                 LossIndexBackPropagation& back_propagation) const
{
    const Index batch_samples_number = batch.inputs_2d.dimension(0);

    const Index trainable_layers_number = neural_network_pointer->get_trainable_layers_number();

    const Tensor<type, 2>& outputs =
        static_cast<ProbabilisticLayerForwardPropagation*>(forward_propagation.layers(trainable_layers_number-1))->activations;

    const Tensor<type, 2>& targets = batch.targets_2d;

    Tensor<type, 0> cross_entropy_error;

    cross_entropy_error.device(*thread_pool_device) = -(targets*(outputs.log())).sum();

    back_propagation.error = cross_entropy_error()/static_cast<type>(batch_samples_number);
}
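// Worked example of the multi-class formula (a sketch): for a single sample with
// one-hot target t = {0, 1, 0} and softmax outputs o = {0.2, 0.7, 0.1},
//   E = -ln(0.7) ≈ 0.357.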
/// Calculates the gradient of the error with respect to the output activations,
/// dispatching to the binary or multi-class delta as in calculate_error().

void CrossEntropyError::calculate_output_delta(const DataSetBatch& batch,
                                               NeuralNetworkForwardPropagation& forward_propagation,
                                               LossIndexBackPropagation& back_propagation) const
{
    const Index outputs_number = neural_network_pointer->get_outputs_number();

    if(outputs_number == 1)
    {
        calculate_binary_output_delta(batch, forward_propagation, back_propagation);
    }
    else
    {
        calculate_multiple_output_delta(batch, forward_propagation, back_propagation);
    }
}
/// Calculates the gradient of the binary cross-entropy error with respect to the outputs:
/// dE/do = (1/N)*(-(t/o) + (1-t)/(1-o)).

void CrossEntropyError::calculate_binary_output_delta(const DataSetBatch& batch,
                                                      NeuralNetworkForwardPropagation& forward_propagation,
                                                      LossIndexBackPropagation& back_propagation) const
{
    const Index trainable_layers_number = neural_network_pointer->get_trainable_layers_number();

    ProbabilisticLayerForwardPropagation* probabilistic_layer_forward_propagation
        = static_cast<ProbabilisticLayerForwardPropagation*>(forward_propagation.layers(trainable_layers_number-1));

    ProbabilisticLayerBackPropagation* probabilistic_layer_back_propagation
        = static_cast<ProbabilisticLayerBackPropagation*>(back_propagation.neural_network.layers(trainable_layers_number-1));

    const Index batch_samples_number = batch.inputs_2d.dimension(0);

    const Tensor<type, 2>& targets = batch.targets_2d;
    const Tensor<type, 2>& outputs = probabilistic_layer_forward_propagation->activations;

    probabilistic_layer_back_propagation->delta.device(*thread_pool_device)
        = static_cast<type>(1)/static_cast<type>(batch_samples_number)
        * (static_cast<type>(-1)*(targets/outputs)
           + (static_cast<type>(1) - targets)/(static_cast<type>(1) - outputs));
}
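// The assignment above is the derivative of the averaged binary loss:
//   dE/do = (1/N)*(-(t/o) + (1-t)/(1-o)).
// Its sign drives each output toward its target under gradient descent:
// negative when t = 1 (o grows toward 1), positive when t = 0 (o shrinks toward 0).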
/// Calculates the gradient of the multi-class cross-entropy error with respect to the outputs:
/// dE/do = -(1/N)*(t/o).

void CrossEntropyError::calculate_multiple_output_delta(const DataSetBatch& batch,
                                                        NeuralNetworkForwardPropagation& forward_propagation,
                                                        LossIndexBackPropagation& back_propagation) const
{
    const Index trainable_layers_number = neural_network_pointer->get_trainable_layers_number();

    ProbabilisticLayerBackPropagation* probabilistic_layer_back_propagation
        = static_cast<ProbabilisticLayerBackPropagation*>(back_propagation.neural_network.layers(trainable_layers_number-1));

    const Index batch_samples_number = batch.inputs_2d.dimension(0);

    const Tensor<type, 2>& targets = batch.targets_2d;

    const Tensor<type, 2>& outputs =
        static_cast<ProbabilisticLayerForwardPropagation*>(forward_propagation.layers(trainable_layers_number-1))->activations;

    probabilistic_layer_back_propagation->delta.device(*thread_pool_device)
        = static_cast<type>(1)/static_cast<type>(batch_samples_number) * (-targets/outputs);
}
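// A note on this delta: dE/do = -(1/N)*(t/o). Chained with the softmax Jacobian
// of the probabilistic layer, and assuming one-hot targets, this reduces to the
// usual dE/dz = (o - t)/N for the pre-activations z.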
/// Returns a string with the name of the cross-entropy error loss type, "CROSS_ENTROPY_ERROR".

string CrossEntropyError::get_error_type() const
{
    return "CROSS_ENTROPY_ERROR";
}


/// Returns a string with the name of the cross-entropy error loss type in text format.

string CrossEntropyError::get_error_type_text() const
{
    return "Cross entropy error";
}
/// Serializes the cross-entropy error object into an XML document of the TinyXML
/// library without keeping the DOM tree in memory.

void CrossEntropyError::write_XML(tinyxml2::XMLPrinter& file_stream) const
{
    // Error type

    file_stream.OpenElement("CrossEntropyError");

    file_stream.CloseElement();
}
/// Loads a cross-entropy error object from an XML document.
/// @param document TinyXML document containing the members of the object.

void CrossEntropyError::from_XML(const tinyxml2::XMLDocument& document)
{
    const tinyxml2::XMLElement* root_element = document.FirstChildElement("CrossEntropyError");

    if(!root_element)
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: CrossEntropyError class.\n"
               << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
               << "Cross entropy error element is nullptr.\n";

        throw logic_error(buffer.str());
    }

    // Regularization

    tinyxml2::XMLDocument regularization_document;

    const tinyxml2::XMLElement* regularization_element = root_element->FirstChildElement("Regularization");

    tinyxml2::XMLNode* element_clone = regularization_element->DeepClone(&regularization_document);

    regularization_document.InsertFirstChild(element_clone);

    regularization_from_XML(regularization_document);
}
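// The expected document layout is, approximately:
//
//   <CrossEntropyError>
//       <Regularization>
//           ...
//       </Regularization>
//   </CrossEntropyError>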