9#ifndef PROBABILISTICLAYER_H
10#define PROBABILISTICLAYER_H
27#include "opennn_strings.h"
33struct ProbabilisticLayerForwardPropagation;
34struct ProbabilisticLayerBackPropagation;
35struct ProbabilisticLayerBackPropagationLM;
38 #include "../../opennn-cuda/opennn-cuda/struct_probabilistic_layer_cuda.h"
73 Index get_neurons_number()
const;
75 Index get_biases_number()
const;
89 void set(
const Index&,
const Index&);
92 void set_inputs_number(
const Index&);
93 void set_neurons_number(
const Index&);
95 void set_biases(
const Tensor<type, 2>&);
96 void set_synaptic_weights(
const Tensor<type, 2>&);
98 void set_parameters(
const Tensor<type, 1>&,
const Index& index=0);
111 Tensor<type, 2>
get_biases(Tensor<type, 1>&)
const;
125 void set_synaptic_weights_constant_Glorot();
131 void insert_parameters(
const Tensor<type, 1>&,
const Index&);
135 void calculate_combinations(
const Tensor<type, 2>&,
136 const Tensor<type, 2>&,
137 const Tensor<type, 2>&,
138 Tensor<type, 2>&)
const;
142 void calculate_activations(
const Tensor<type, 2>&, Tensor<type, 2>&)
const;
144 void calculate_activations_derivatives(
const Tensor<type, 2>&,
146 Tensor<type, 3>&)
const;
152 void forward_propagate(
const Tensor<type, 2>&,
155 void forward_propagate(
const Tensor<type, 2>&,
161 void calculate_error_gradient(
const Tensor<type, 2>&,
169 void calculate_squared_errors_Jacobian_lm(
const Tensor<type, 2>&,
175 Tensor<type, 2>&)
const;
185 string write_expression(
const Tensor<string, 1>&,
const Tensor<string, 1>&)
const;
186 string write_combinations(
const Tensor<string, 1>&)
const;
187 string write_activations(
const Tensor<string, 1>&)
const;
189 string write_expression_c()
const;
190 string write_combinations_c()
const;
193 string write_expression_python()
const;
194 string write_combinations_python()
const;
195 string write_activations_python()
const;
219 type decision_threshold;
226 #include "../../opennn-cuda/opennn-cuda/probabilistic_layer_cuda.h"
244 set(new_batch_samples_number, new_layer_pointer);
247 void set(
const Index& new_batch_samples_number,
Layer* new_layer_pointer)
249 layer_pointer = new_layer_pointer;
251 batch_samples_number = new_batch_samples_number;
253 const Index neurons_number = layer_pointer->get_neurons_number();
255 combinations.resize(batch_samples_number, neurons_number);
257 activations.resize(batch_samples_number, neurons_number);
259 activations_derivatives.resize(batch_samples_number, neurons_number, neurons_number);
265 cout <<
"Combinations:" << endl;
266 cout << combinations << endl;
268 cout <<
"Activations:" << endl;
269 cout << activations << endl;
271 cout <<
"Activations derivatives:" << endl;
272 cout << activations_derivatives << endl;
275 Tensor<type, 2> combinations;
276 Tensor<type, 2> activations;
277 Tensor<type, 3> activations_derivatives;
292 set(new_batch_samples_number, new_layer_pointer);
296 void set(
const Index& new_batch_samples_number,
Layer* new_layer_pointer)
298 layer_pointer = new_layer_pointer;
300 batch_samples_number = new_batch_samples_number;
302 const Index neurons_number = layer_pointer->get_neurons_number();
303 const Index parameters_number = layer_pointer->get_parameters_number();
305 delta.resize(batch_samples_number, neurons_number);
306 delta_row.resize(neurons_number);
308 squared_errors_Jacobian.resize(batch_samples_number, parameters_number);
310 error_combinations_derivatives.resize(batch_samples_number, neurons_number);
315 cout <<
"Delta:" << endl;
316 cout << delta << endl;
318 cout <<
"Squared errors Jacobian: " << endl;
319 cout << squared_errors_Jacobian << endl;
322 Tensor<type, 2> delta;
323 Tensor<type, 1> delta_row;
325 Tensor<type, 2> error_combinations_derivatives;
327 Tensor<type, 2> squared_errors_Jacobian;
343 set(new_batch_samples_number, new_layer_pointer);
347 void set(
const Index& new_batch_samples_number,
Layer* new_layer_pointer)
349 layer_pointer = new_layer_pointer;
351 batch_samples_number = new_batch_samples_number;
353 const Index neurons_number = layer_pointer->get_neurons_number();
356 biases_derivatives.resize(neurons_number);
358 synaptic_weights_derivatives.resize(inputs_number, neurons_number);
360 delta.resize(batch_samples_number, neurons_number);
361 delta_row.resize(neurons_number);
363 error_combinations_derivatives.resize(batch_samples_number, neurons_number);
368 cout <<
"Delta:" << endl;
369 cout << delta << endl;
371 cout <<
"Biases derivatives:" << endl;
372 cout << biases_derivatives << endl;
374 cout <<
"Synaptic weights derivatives:" << endl;
375 cout << synaptic_weights_derivatives << endl;
378 Tensor<type, 2> delta;
379 Tensor<type, 1> delta_row;
381 Tensor<type, 2> error_combinations_derivatives;
383 Tensor<type, 2> synaptic_weights_derivatives;
384 Tensor<type, 1> biases_derivatives;
This abstract class represents the concept of a layer of neurons in OpenNN.
virtual Index get_inputs_number() const
Returns the number of inputs.
This class represents a layer of probabilistic neurons.
string write_activation_function() const
void set_parameters_constant(const type &)
string write_softmax_expression(const Tensor< string, 1 > &, const Tensor< string, 1 > &) const
Index get_synaptic_weights_number() const
Returns the number of layer's synaptic weights.
const bool & get_display() const
Index get_inputs_number() const
Returns the number of inputs.
const ActivationFunction & get_activation_function() const
string write_activation_function_text() const
void set_biases_constant(const type &)
string write_logistic_expression(const Tensor< string, 1 > &, const Tensor< string, 1 > &) const
void from_XML(const tinyxml2::XMLDocument &)
virtual void set_default()
bool display
Display messages to screen.
ActivationFunction
Enumeration of available methods for interpreting variables as probabilities.
virtual ~ProbabilisticLayer()
void set_activation_function(const ActivationFunction &)
const Tensor< type, 2 > & get_biases() const
Returns the biases of the layer.
const type & get_decision_threshold() const
Returns the decision threshold.
string write_no_probabilistic_expression(const Tensor< string, 1 > &, const Tensor< string, 1 > &) const
Tensor< type, 2 > calculate_outputs(const Tensor< type, 2 > &)
string write_activations_c() const
string write_competitive_expression(const Tensor< string, 1 > &, const Tensor< string, 1 > &) const
void set_parameters_random()
string write_binary_expression(const Tensor< string, 1 > &, const Tensor< string, 1 > &) const
void set_decision_threshold(const type &)
void set_synaptic_weights_constant(const type &)
void set_display(const bool &)
Tensor< type, 2 > synaptic_weights
This matrix contains the connection strengths from a layer's inputs to its neurons.
void write_XML(tinyxml2::XMLPrinter &) const
const Tensor< type, 2 > & get_synaptic_weights() const
Returns the synaptic weights of the layer.
Index get_parameters_number() const
Returns the number of parameters (biases and synaptic weights) of the layer.
ActivationFunction activation_function
Activation function variable.
Tensor< type, 1 > get_parameters() const
LayerBackPropagation()
Default constructor.
LayerBackPropagationLM()
Default constructor.
LayerForwardPropagation()
Default constructor.