9#ifndef PERCEPTRONLAYER_H
10#define PERCEPTRONLAYER_H
25#include "probabilistic_layer.h"
27#include "opennn_strings.h"
32struct PerceptronLayerForwardPropagation;
33struct PerceptronLayerBackPropagation;
34struct PerceptronLayerBackPropagationLM;
37 #include "../../opennn-cuda/opennn-cuda/struct_perceptron_layer_cuda.h"
 /// Enumeration of available activation functions for the perceptron neuron model.

 56 enum class ActivationFunction{Threshold, SymmetricThreshold, Logistic, HyperbolicTangent, Linear, RectifiedLinear,
 57 ExponentialLinear, ScaledExponentialLinear, SoftPlus, SoftSign, HardSigmoid};
71 bool is_empty()
const;
81 Tensor<type, 2>
get_biases(
const Tensor<type, 1>&)
const;
84 Index get_biases_number()
const;
105 void set_name(
const string&);
117 void set_parameters(
const Tensor<type, 1>&,
const Index& index=0);
139 void calculate_combinations(
const Tensor<type, 2>&,
140 const Tensor<type, 2>&,
141 const Tensor<type, 2>&,
142 Tensor<type, 2>&)
const;
146 void calculate_activations(
const Tensor<type, 2>&,
147 Tensor<type, 2>&)
const;
149 void calculate_activations_derivatives(
const Tensor<type, 2>&,
151 Tensor<type, 2>&)
const;
155 Tensor<type, 2> calculate_outputs(
const Tensor<type, 2>&);
157 void forward_propagate(
const Tensor<type, 2>&,
161 void forward_propagate(
const Tensor<type, 2>&,
195 void calculate_squared_errors_Jacobian_lm(
const Tensor<type, 2>&,
201 Tensor<type, 2>&)
const;
205 void calculate_error_gradient(
const Tensor<type, 2>&,
211 Tensor<type, 1>&)
const;
215 string write_expression(
const Tensor<string, 1>&,
const Tensor<string, 1>&)
const;
217 string write_activation_function_expression()
const;
219 string write_expression_c()
const;
220 string write_combinations_c()
const;
223 string write_combinations_python()
const;
225 string write_expression_python()
const;
254 #include "../../opennn-cuda/opennn-cuda/perceptron_layer_cuda.h"
270 set(new_batch_samples_number, new_layer_pointer);
273 void set(
const Index& new_batch_samples_number,
Layer* new_layer_pointer)
275 layer_pointer = new_layer_pointer;
277 batch_samples_number = new_batch_samples_number;
279 const Index neurons_number = layer_pointer->get_neurons_number();
281 combinations.resize(batch_samples_number, neurons_number);
283 activations.resize(batch_samples_number, neurons_number);
285 activations_derivatives.resize(batch_samples_number, neurons_number);
290 cout <<
"Combinations:" << endl;
291 cout << combinations << endl;
293 cout <<
"Activations:" << endl;
294 cout << activations << endl;
296 cout <<
"Activations derivatives:" << endl;
297 cout << activations_derivatives << endl;
300 Tensor<type, 2> combinations;
301 Tensor<type, 2> activations;
302 Tensor<type, 2> activations_derivatives;
319 set(new_batch_samples_number, new_layer_pointer);
323 void set(
const Index& new_batch_samples_number,
Layer* new_layer_pointer)
325 layer_pointer = new_layer_pointer;
327 batch_samples_number = new_batch_samples_number;
329 const Index neurons_number = layer_pointer->get_neurons_number();
330 const Index parameters_number = layer_pointer->get_parameters_number();
332 delta.resize(batch_samples_number, neurons_number);
334 squared_errors_Jacobian.resize(batch_samples_number, parameters_number);
339 cout <<
"Delta:" << endl;
340 cout << delta << endl;
342 cout <<
"Squared errors Jacobian: " << endl;
343 cout << squared_errors_Jacobian << endl;
347 Tensor<type, 2> delta;
349 Tensor<type, 2> squared_errors_Jacobian;
367 set(new_batch_samples_number, new_layer_pointer);
371 void set(
const Index& new_batch_samples_number,
Layer* new_layer_pointer)
373 layer_pointer = new_layer_pointer;
375 batch_samples_number = new_batch_samples_number;
377 const Index neurons_number = layer_pointer->get_neurons_number();
380 delta.resize(batch_samples_number, neurons_number);
382 biases_derivatives.resize(neurons_number);
384 synaptic_weights_derivatives.resize(inputs_number, neurons_number);
389 cout <<
"Delta:" << endl;
390 cout << delta << endl;
392 cout <<
"Biases derivatives:" << endl;
393 cout << biases_derivatives << endl;
395 cout <<
"Synaptic weights derivatives:" << endl;
396 cout << synaptic_weights_derivatives << endl;
399 Tensor<type, 2> delta;
401 Tensor<type, 1> biases_derivatives;
402 Tensor<type, 2> synaptic_weights_derivatives;
This abstract class represents the concept of layer of neurons in OpenNN.
virtual Index get_inputs_number() const
Returns the number of inputs.
This class represents a layer of perceptrons.
void set_parameters(const Tensor< type, 1 > &, const Index &index=0)
Sets the parameters of this layer.
string write_activation_function() const
void set_parameters_constant(const type &)
Index get_synaptic_weights_number() const
Returns the number of synaptic weights in the layer.
string write_activations_python() const
const bool & get_display() const
virtual ~PerceptronLayer()
Index get_inputs_number() const
Returns the number of inputs to the layer.
void set_biases_constant(const type &)
string write_expression(const Tensor< string, 1 > &, const Tensor< string, 1 > &) const
bool display
Display messages to screen.
ActivationFunction
Enumeration of available activation functions for the perceptron neuron model.
void set_activation_function(const ActivationFunction &)
const Tensor< type, 2 > & get_biases() const
void set_synaptic_weights(const Tensor< type, 2 > &)
void set_inputs_number(const Index &)
void set_biases(const Tensor< type, 2 > &)
Index get_neurons_number() const
Returns the number of neurons in the layer.
string write_activations_c() const
void set_parameters_random()
const PerceptronLayer::ActivationFunction & get_activation_function() const
void set_synaptic_weights_constant(const type &)
void set_display(const bool &)
Tensor< type, 2 > synaptic_weights
This matrix contains the connection strengths from a layer's inputs to its neurons.
const Tensor< type, 2 > & get_synaptic_weights() const
Index get_parameters_number() const
Returns the number of parameters (biases and synaptic weights) of the layer.
ActivationFunction activation_function
Activation function variable.
void set_neurons_number(const Index &)
Tensor< type, 1 > get_parameters() const
LayerBackPropagation()
Default constructor.
LayerBackPropagationLM()
Default constructor.
LayerForwardPropagation()
Default constructor.