OpenNN
Open-source neural networks library
Loading...
Searching...
No Matches
dense_layer.h
Go to the documentation of this file.
1// OpenNN: Open Neural Networks Library
2// www.opennn.net
3//
4// D E N S E L A Y E R C L A S S H E A D E R
5//
6// Artificial Intelligence Techniques SL
7// artelnics@artelnics.com
8
15
16#pragma once
17
18#include "layer.h"
19#include "operators.h"
20
21namespace opennn
22{
23
37class Dense final : public Layer
38{
39public:
40
52 Dense(const Shape& input_shape = {},
53 const Shape& output_shape = {},
54 const string& activation_name = "Tanh",
55 bool batch_normalization = false,
56 const string& label = "dense_layer");
57
59 Shape get_input_shape() const override { return input_shape; }
60
66 Shape get_output_shape() const override;
67
72 Index get_input_features() const { return input_shape.empty() ? 0 : input_shape.back(); }
73
78 Index get_sequence_length() const { return (input_shape.rank == 2) ? input_shape[0] : Index(1); }
79
81 const Activation::Function& get_activation_function() const { return activation.function; }
82
84 Activation::Function get_output_activation() const override { return activation.function; }
85
87 bool get_batch_normalization() const { return batch_norm.active(); }
88
90 float get_momentum() const { return batch_norm.momentum; }
91
97 vector<Operator*> get_operators() override;
98
104 vector<pair<Shape, Type>> get_forward_specs(Index batch_size) const override;
105
114 void set(const Shape& input_shape = {},
115 const Shape& output_shape = {},
116 const string& activation_name = "Tanh",
117 bool batch_normalization = false,
118 const string& label = "dense_layer");
119
121 void set_input_shape(const Shape&) override;
122
124 void set_output_shape(const Shape&) override;
125
127 void on_compute_dtype_changed() override { configure_operators(); }
128
135 void set_activation_function(const string&);
136
142 void set_batch_normalization(bool enable);
143
148 void set_dropout_rate(float new_dropout_rate) { dropout.set_rate(new_dropout_rate); }
149
154 void set_momentum(float new_momentum);
155
163 void back_propagate(ForwardPropagation& fp, BackPropagation& bp, size_t layer) const noexcept override;
164
169 void read_JSON_body(const Json*) override;
170
171private:
172
174 Shape input_shape;
175
177 Index output_features = 0;
178
180 Combination combination;
182 Activation activation;
184 BatchNorm batch_norm;
186 Dropout dropout;
187
189 enum Forward {Input, CombinationView, BatchNormMean, BatchNormInverseVariance, ActivationView, Output};
191 enum Backward {OutputDelta, InputDelta};
192
194 void configure_operators();
195};
196
197}
198
199// OpenNN: Open Neural Networks Library.
200// Copyright(C) 2005-2026 Artificial Intelligence Techniques, SL.
201// Licensed under the GNU Lesser General Public License v2.1 or later.
void set_dropout_rate(float new_dropout_rate)
Sets the dropout rate applied at the layer output.
Definition dense_layer.h:148
void read_JSON_body(const Json *) override
Reads the layer-specific JSON body (activation name, BN flag, dropout rate, output features) from the given JSON object.
void set_activation_function(const string &)
Sets the activation function by name.
void set_batch_normalization(bool enable)
Enables or disables BatchNorm between the linear projection and the activation.
void set_input_shape(const Shape &) override
Updates the input shape and re-shapes weight tensors accordingly.
void set_momentum(float new_momentum)
Sets the momentum used by BatchNorm to update running statistics.
void set(const Shape &input_shape={}, const Shape &output_shape={}, const string &activation_name="Tanh", bool batch_normalization=false, const string &label="dense_layer")
Re-initializes the layer; same arguments as the constructor.
float get_momentum() const
BatchNorm momentum used when updating running statistics.
Definition dense_layer.h:90
void on_compute_dtype_changed() override
Reconfigures inner operators when the compute dtype changes.
Definition dense_layer.h:127
Shape get_output_shape() const override
Returns the per-sample output shape.
bool get_batch_normalization() const
Whether batch normalization is enabled in the pipeline.
Definition dense_layer.h:87
void set_output_shape(const Shape &) override
Updates the output features and re-shapes weight tensors accordingly.
const Activation::Function & get_activation_function() const
Reference to the activation function applied at this layer's output.
Definition dense_layer.h:81
Index get_sequence_length() const
Sequence length when the input is 2D, 1 otherwise.
Definition dense_layer.h:78
Activation::Function get_output_activation() const override
Activation function fused at the end of this layer.
Definition dense_layer.h:84
vector< pair< Shape, Type > > get_forward_specs(Index batch_size) const override
Specifications of the forward intermediate buffers.
Dense(const Shape &input_shape={}, const Shape &output_shape={}, const string &activation_name="Tanh", bool batch_normalization=false, const string &label="dense_layer")
Constructs a Dense layer.
Index get_input_features() const
Number of input features (last dimension of the input shape).
Definition dense_layer.h:72
Shape get_input_shape() const override
Returns the per-sample input shape.
Definition dense_layer.h:59
vector< Operator * > get_operators() override
Returns the active operators in pipeline order.
void back_propagate(ForwardPropagation &fp, BackPropagation &bp, size_t layer) const noexcept override
Backward pass through the Dropout, Activation, BatchNorm and Combination operators in reverse order.
Definition json.h:22
Layer()=default
Default constructor; only invoked by subclasses.
string label
User-visible label for this layer instance (default "my_layer").
Definition layer.h:469
Declares the Layer abstract base class and the LayerType enumeration.
Definition adaptive_moment_estimation.h:19
Definition operators.h:107
Function
Definition operators.h:108
Definition back_propagation.h:26
Definition operators.h:199
Definition operators.h:147
Definition operators.h:65
Definition forward_propagation.h:19
Definition tensor_utilities.h:46