//   OpenNN: Open Neural Networks Library
//   www.opennn.net
//
//   P E R C E P T R O N   L A Y E R   C L A S S
//
//   Artificial Intelligence Techniques SL
//   artelnics@artelnics.com

#include "perceptron_layer.h"

namespace OpenNN
{

/// Default constructor.
/// It creates an empty perceptron layer.

PerceptronLayer::PerceptronLayer() : Layer()
{
    set();

    layer_type = Type::Perceptron;
}


/// Layer architecture constructor.
/// It creates a layer with a given number of inputs and perceptrons and a given activation function.

PerceptronLayer::PerceptronLayer(const Index& new_inputs_number, const Index& new_neurons_number,
                                 const PerceptronLayer::ActivationFunction& new_activation_function) : Layer()
{
    set(new_inputs_number, new_neurons_number, new_activation_function);

    layer_type = Type::Perceptron;

    layer_name = "perceptron_layer";
}


/// Destructor.

PerceptronLayer::~PerceptronLayer()
{
}


/// Returns the number of inputs to the layer.

Index PerceptronLayer::get_inputs_number() const
{
    return synaptic_weights.dimension(0);
}


/// Returns the number of neurons in the layer.

Index PerceptronLayer::get_neurons_number() const
{
    return biases.size();
}


Index PerceptronLayer::get_biases_number() const
{
    return biases.size();
}


/// Returns the number of synaptic weights of the layer.

Index PerceptronLayer::get_synaptic_weights_number() const
{
    return synaptic_weights.size();
}


/// Returns the number of parameters (biases and synaptic weights) of the layer.

Index PerceptronLayer::get_parameters_number() const
{
    return biases.size() + synaptic_weights.size();
}


/// Returns the biases of all the perceptrons in the layer.

const Tensor<type, 2>& PerceptronLayer::get_biases() const
{
    return biases;
}


/// Returns the synaptic weights of all the perceptrons in the layer.

const Tensor<type, 2>& PerceptronLayer::get_synaptic_weights() const
{
    return synaptic_weights;
}


/// Extracts the synaptic weights of this layer from a vector of parameters, in which the weights are stored after the biases.

Tensor<type, 2> PerceptronLayer::get_synaptic_weights(const Tensor<type, 1>& parameters) const
{
    const Index inputs_number = get_inputs_number();

    const Index neurons_number = get_neurons_number();

    const Index synaptic_weights_number = get_synaptic_weights_number();

    const Index parameters_size = parameters.size();

    const Index start_synaptic_weights_number = (parameters_size - synaptic_weights_number);

    Tensor<type, 1> new_synaptic_weights = parameters.slice(Eigen::array<Eigen::Index, 1>({start_synaptic_weights_number}),
                                                            Eigen::array<Eigen::Index, 1>({synaptic_weights_number}));

    Eigen::array<Index, 2> two_dim{{inputs_number, neurons_number}};

    return new_synaptic_weights.reshape(two_dim);
}


/// Extracts the biases of this layer from the beginning of a vector of parameters.

Tensor<type, 2> PerceptronLayer::get_biases(const Tensor<type, 1>& parameters) const
{
    const Index biases_number = biases.size();

    Tensor<type, 1> new_biases(biases_number);

    new_biases = parameters.slice(Eigen::array<Eigen::Index, 1>({0}), Eigen::array<Eigen::Index, 1>({biases_number}));

    Eigen::array<Index, 2> two_dim{{1, biases.dimension(1)}};

    return new_biases.reshape(two_dim);
}


/// Returns a single vector with all the parameters of the layer.
/// The format is: first the biases and then the synaptic weights, in column-major order.

Tensor<type, 1> PerceptronLayer::get_parameters() const
{
    Tensor<type, 1> parameters(synaptic_weights.size() + biases.size());

    for(Index i = 0; i < biases.size(); i++)
    {
        fill_n(parameters.data() + i, 1, biases(i));
    }

    for(Index i = 0; i < synaptic_weights.size(); i++)
    {
        fill_n(parameters.data() + biases.size() + i, 1, synaptic_weights(i));
    }

    return parameters;
}
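
// For example, for a layer with 2 inputs and 3 neurons the returned vector has 9 elements ordered as
//
//     [ b1, b2, b3, w11, w21, w12, w22, w13, w23 ]
//
// where w_ij is the weight from input i to neuron j: the biases first and then the synaptic weights
// column by column, following Eigen's column-major storage.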


/// Returns the activation function of the layer.

const PerceptronLayer::ActivationFunction& PerceptronLayer::get_activation_function() const
{
    return activation_function;
}


/// Returns a string with the name of the layer activation function.

string PerceptronLayer::write_activation_function() const
{
    switch(activation_function)
    {
    case ActivationFunction::Logistic:
        return "Logistic";

    case ActivationFunction::HyperbolicTangent:
        return "HyperbolicTangent";

    case ActivationFunction::Threshold:
        return "Threshold";

    case ActivationFunction::SymmetricThreshold:
        return "SymmetricThreshold";

    case ActivationFunction::Linear:
        return "Linear";

    case ActivationFunction::RectifiedLinear:
        return "RectifiedLinear";

    case ActivationFunction::ScaledExponentialLinear:
        return "ScaledExponentialLinear";

    case ActivationFunction::SoftPlus:
        return "SoftPlus";

    case ActivationFunction::SoftSign:
        return "SoftSign";

    case ActivationFunction::HardSigmoid:
        return "HardSigmoid";

    case ActivationFunction::ExponentialLinear:
        return "ExponentialLinear";
    }

    return string();
}


/// Returns true if messages from this class are displayed on the screen, and false otherwise.

const bool& PerceptronLayer::get_display() const
{
    return display;
}


/// Sets an empty layer: empty biases and synaptic weights, and the default members.

void PerceptronLayer::set()
{
    biases.resize(0, 0);

    synaptic_weights.resize(0, 0);

    set_default();
}


/// Sets new numbers of inputs and perceptrons and a new activation function in the layer.
/// It also initializes the parameters at random and sets the rest of the members to their default values.

void PerceptronLayer::set(const Index& new_inputs_number, const Index& new_neurons_number,
                          const PerceptronLayer::ActivationFunction& new_activation_function)
{
    biases.resize(1, new_neurons_number);

    synaptic_weights.resize(new_inputs_number, new_neurons_number);

    set_parameters_random();

    activation_function = new_activation_function;

    set_default();
}


/// Sets the members of this layer to their default values:
/// layer name "perceptron_layer", display flag true and layer type Perceptron.

void PerceptronLayer::set_default()
{
    layer_name = "perceptron_layer";

    display = true;

    layer_type = Type::Perceptron;
}


void PerceptronLayer::set_name(const string& new_layer_name)
{
    layer_name = new_layer_name;
}


void PerceptronLayer::set_inputs_number(const Index& new_inputs_number)
{
    const Index neurons_number = get_neurons_number();

    biases.resize(1, neurons_number);

    synaptic_weights.resize(new_inputs_number, neurons_number);
}


void PerceptronLayer::set_neurons_number(const Index& new_neurons_number)
{
    const Index inputs_number = get_inputs_number();

    biases.resize(1, new_neurons_number);

    synaptic_weights.resize(inputs_number, new_neurons_number);
}


void PerceptronLayer::set_biases(const Tensor<type, 2>& new_biases)
{
    biases = new_biases;
}


void PerceptronLayer::set_synaptic_weights(const Tensor<type, 2>& new_synaptic_weights)
{
    synaptic_weights = new_synaptic_weights;
}


/// Sets the parameters of this layer from a vector, reading the biases first and then the synaptic weights,
/// starting at the given position of the vector.

void PerceptronLayer::set_parameters(const Tensor<type, 1>& new_parameters, const Index& index)
{
    const Index biases_number = get_biases_number();
    const Index synaptic_weights_number = get_synaptic_weights_number();

    memcpy(biases.data(),
           new_parameters.data() + index,
           static_cast<size_t>(biases_number)*sizeof(type));

    memcpy(synaptic_weights.data(),
           new_parameters.data() + biases_number + index,
           static_cast<size_t>(synaptic_weights_number)*sizeof(type));
}


/// Sets a new activation function for the layer.

void PerceptronLayer::set_activation_function(const PerceptronLayer::ActivationFunction& new_activation_function)
{
    activation_function = new_activation_function;
}


/// Sets a new activation function for the layer from a string with the name of the function.

void PerceptronLayer::set_activation_function(const string& new_activation_function_name)
{
    if(new_activation_function_name == "Logistic")
    {
        activation_function = ActivationFunction::Logistic;
    }
    else if(new_activation_function_name == "HyperbolicTangent")
    {
        activation_function = ActivationFunction::HyperbolicTangent;
    }
    else if(new_activation_function_name == "Threshold")
    {
        activation_function = ActivationFunction::Threshold;
    }
    else if(new_activation_function_name == "SymmetricThreshold")
    {
        activation_function = ActivationFunction::SymmetricThreshold;
    }
    else if(new_activation_function_name == "Linear")
    {
        activation_function = ActivationFunction::Linear;
    }
    else if(new_activation_function_name == "RectifiedLinear")
    {
        activation_function = ActivationFunction::RectifiedLinear;
    }
    else if(new_activation_function_name == "ScaledExponentialLinear")
    {
        activation_function = ActivationFunction::ScaledExponentialLinear;
    }
    else if(new_activation_function_name == "SoftPlus")
    {
        activation_function = ActivationFunction::SoftPlus;
    }
    else if(new_activation_function_name == "SoftSign")
    {
        activation_function = ActivationFunction::SoftSign;
    }
    else if(new_activation_function_name == "HardSigmoid")
    {
        activation_function = ActivationFunction::HardSigmoid;
    }
    else if(new_activation_function_name == "ExponentialLinear")
    {
        activation_function = ActivationFunction::ExponentialLinear;
    }
    else
    {
        ostringstream buffer;

        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "void set_activation_function(const string&) method.\n"
               << "Unknown activation function: " << new_activation_function_name << ".\n";

        throw logic_error(buffer.str());
    }
}


/// Sets a new display flag.
/// If it is true, messages from this class are displayed on the screen; if it is false, they are not.

void PerceptronLayer::set_display(const bool& new_display)
{
    display = new_display;
}


/// Initializes the biases of all the perceptrons in the layer with a given value.

void PerceptronLayer::set_biases_constant(const type& value)
{
    biases.setConstant(value);
}


/// Initializes the synaptic weights of all the perceptrons in the layer with a given value.

void PerceptronLayer::set_synaptic_weights_constant(const type& value)
{
    synaptic_weights.setConstant(value);
}


/// Initializes all the biases and synaptic weights of the layer with a given value.

void PerceptronLayer::set_parameters_constant(const type& value)
{
    biases.setConstant(value);

    synaptic_weights.setConstant(value);
}


/// Initializes all the biases and synaptic weights of the layer at random, with values between -0.2 and 0.2.

void PerceptronLayer::set_parameters_random()
{
    const type minimum = type(-0.2);
    const type maximum = type(0.2);

    for(Index i = 0; i < biases.size(); i++)
    {
        const type random = static_cast<type>(rand()/(RAND_MAX + 1.0));

        biases(i) = minimum + (maximum - minimum)*random;
    }

    for(Index i = 0; i < synaptic_weights.size(); i++)
    {
        const type random = static_cast<type>(rand()/(RAND_MAX + 1.0));

        synaptic_weights(i) = minimum + (maximum - minimum)*random;
    }
}


/// Calculates the combinations (weighted sums) of the layer from a batch of inputs:
/// combinations = biases (broadcast over samples) + inputs * synaptic_weights.

void PerceptronLayer::calculate_combinations(const Tensor<type, 2>& inputs,
                                             const Tensor<type, 2>& biases,
                                             const Tensor<type, 2>& synaptic_weights,
                                             Tensor<type, 2>& combinations) const
{
#ifdef OPENNN_DEBUG
    check_columns_number(inputs, get_inputs_number(), LOG);
    // check_dimensions(biases, 1, get_neurons_number(), LOG);
    check_dimensions(synaptic_weights, get_inputs_number(), get_neurons_number(), LOG);
    check_dimensions(combinations, inputs.dimension(0), get_neurons_number(), LOG);
#endif

    const Index batch_samples_number = inputs.dimension(0);
    const Index biases_number = get_biases_number();

    // Fill each column of combinations with the bias of the corresponding neuron.

    for(Index i = 0; i < biases_number; i++)
    {
        fill_n(combinations.data() + i*batch_samples_number, batch_samples_number, biases(i));
    }

    // Add the contraction of the inputs with the synaptic weights.

    combinations.device(*thread_pool_device) += inputs.contract(synaptic_weights, A_B);
}
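
// As a concrete illustration (values made up): with 2 inputs, 3 neurons, biases b = [0.1, 0.2, 0.3]
// and synaptic weights
//
//     W = [ 1  3  5 ]      (row i = input i, column j = neuron j)
//         [ 2  4  6 ]
//
// an input row x = [1, 1] produces the combination row
//
//     c = b + x*W = [ 0.1 + 3, 0.2 + 7, 0.3 + 11 ] = [ 3.1, 7.2, 11.3 ].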


void PerceptronLayer::calculate_activations(const Tensor<type, 2>& combinations, Tensor<type, 2>& activations) const
{
#ifdef OPENNN_DEBUG
    check_columns_number(combinations, get_neurons_number(), LOG);
    check_dimensions(activations, combinations.dimension(0), get_neurons_number(), LOG);
#endif

    switch(activation_function)
    {
    case ActivationFunction::Linear: linear(combinations, activations); return;

    case ActivationFunction::Logistic: logistic(combinations, activations); return;

    case ActivationFunction::HyperbolicTangent: hyperbolic_tangent(combinations, activations); return;

    case ActivationFunction::Threshold: threshold(combinations, activations); return;

    case ActivationFunction::SymmetricThreshold: symmetric_threshold(combinations, activations); return;

    case ActivationFunction::RectifiedLinear: rectified_linear(combinations, activations); return;

    case ActivationFunction::ScaledExponentialLinear: scaled_exponential_linear(combinations, activations); return;

    case ActivationFunction::SoftPlus: soft_plus(combinations, activations); return;

    case ActivationFunction::SoftSign: soft_sign(combinations, activations); return;

    case ActivationFunction::HardSigmoid: hard_sigmoid(combinations, activations); return;

    case ActivationFunction::ExponentialLinear: exponential_linear(combinations, activations); return;
    }
}


void PerceptronLayer::calculate_activations_derivatives(const Tensor<type, 2>& combinations,
                                                        Tensor<type, 2>& activations,
                                                        Tensor<type, 2>& activations_derivatives) const
{
#ifdef OPENNN_DEBUG
    check_columns_number(combinations, get_neurons_number(), LOG);
    check_dimensions(activations, combinations.dimension(0), get_neurons_number(), LOG);
    check_dimensions(activations_derivatives, combinations.dimension(0), get_neurons_number(), LOG);
#endif

    switch(activation_function)
    {
    case ActivationFunction::Linear: linear_derivatives(combinations, activations, activations_derivatives); return;

    case ActivationFunction::Logistic: logistic_derivatives(combinations, activations, activations_derivatives); return;

    case ActivationFunction::HyperbolicTangent: hyperbolic_tangent_derivatives(combinations, activations, activations_derivatives); return;

    case ActivationFunction::Threshold: threshold_derivatives(combinations, activations, activations_derivatives); return;

    case ActivationFunction::SymmetricThreshold: symmetric_threshold_derivatives(combinations, activations, activations_derivatives); return;

    case ActivationFunction::RectifiedLinear: rectified_linear_derivatives(combinations, activations, activations_derivatives); return;

    case ActivationFunction::ScaledExponentialLinear: scaled_exponential_linear_derivatives(combinations, activations, activations_derivatives); return;

    case ActivationFunction::SoftPlus: soft_plus_derivatives(combinations, activations, activations_derivatives); return;

    case ActivationFunction::SoftSign: soft_sign_derivatives(combinations, activations, activations_derivatives); return;

    case ActivationFunction::HardSigmoid: hard_sigmoid_derivatives(combinations, activations, activations_derivatives); return;

    case ActivationFunction::ExponentialLinear: exponential_linear_derivatives(combinations, activations, activations_derivatives); return;
    }
}


Tensor<type, 2> PerceptronLayer::calculate_outputs(const Tensor<type, 2>& inputs)
{
#ifdef OPENNN_DEBUG
    check_columns_number(inputs, get_inputs_number(), LOG);
#endif

    const Index batch_size = inputs.dimension(0);
    const Index outputs_number = get_neurons_number();

    Tensor<type, 2> outputs(batch_size, outputs_number);

    calculate_combinations(inputs, biases, synaptic_weights, outputs);

    calculate_activations(outputs, outputs);

    return outputs;
}
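
// A minimal usage sketch (illustrative only): assuming the layer's thread pool device has already been
// set up, as OpenNN arranges when the layer belongs to a NeuralNetwork, a batch of outputs can be
// obtained directly from a batch of inputs:
//
//     PerceptronLayer layer(3, 2, PerceptronLayer::ActivationFunction::HyperbolicTangent);
//
//     Tensor<type, 2> inputs(10, 3);   // 10 samples, 3 inputs
//     inputs.setRandom();
//
//     const Tensor<type, 2> outputs = layer.calculate_outputs(inputs);   // 10 x 2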


void PerceptronLayer::forward_propagate(const Tensor<type, 2>& inputs,
                                        LayerForwardPropagation* forward_propagation)
{
#ifdef OPENNN_DEBUG
    check_columns_number(inputs, get_inputs_number(), LOG);
#endif

    PerceptronLayerForwardPropagation* perceptron_layer_forward_propagation
            = static_cast<PerceptronLayerForwardPropagation*>(forward_propagation);

    calculate_combinations(inputs,
                           biases,
                           synaptic_weights,
                           perceptron_layer_forward_propagation->combinations);

    calculate_activations_derivatives(perceptron_layer_forward_propagation->combinations,
                                      perceptron_layer_forward_propagation->activations,
                                      perceptron_layer_forward_propagation->activations_derivatives);
}


void PerceptronLayer::forward_propagate(const Tensor<type, 2>& inputs,
                                        Tensor<type, 1> potential_parameters,
                                        LayerForwardPropagation* forward_propagation)
{
#ifdef OPENNN_DEBUG
    check_columns_number(inputs, get_inputs_number(), LOG);
    check_size(potential_parameters, get_parameters_number(), LOG);
#endif

    const Index neurons_number = get_neurons_number();

    const Index inputs_number = get_inputs_number();

    const TensorMap<Tensor<type, 2>> potential_biases(potential_parameters.data(), neurons_number, 1);

    const TensorMap<Tensor<type, 2>> potential_synaptic_weights(potential_parameters.data() + neurons_number,
                                                                inputs_number, neurons_number);

    PerceptronLayerForwardPropagation* perceptron_layer_forward_propagation
            = static_cast<PerceptronLayerForwardPropagation*>(forward_propagation);

    calculate_combinations(inputs,
                           potential_biases,
                           potential_synaptic_weights,
                           perceptron_layer_forward_propagation->combinations);

    calculate_activations_derivatives(perceptron_layer_forward_propagation->combinations,
                                      perceptron_layer_forward_propagation->activations,
                                      perceptron_layer_forward_propagation->activations_derivatives);
}


void PerceptronLayer::calculate_hidden_delta(LayerForwardPropagation* next_layer_forward_propagation,
                                             LayerBackPropagation* next_layer_back_propagation,
                                             LayerBackPropagation* layer_back_propagation) const
{
    PerceptronLayerBackPropagation* perceptron_layer_back_propagation =
            static_cast<PerceptronLayerBackPropagation*>(layer_back_propagation);

    switch(next_layer_back_propagation->layer_pointer->get_type())
    {
    case Type::Perceptron:
    {
        PerceptronLayerForwardPropagation* next_perceptron_layer_forward_propagation =
                static_cast<PerceptronLayerForwardPropagation*>(next_layer_forward_propagation);

        PerceptronLayerBackPropagation* next_perceptron_layer_back_propagation =
                static_cast<PerceptronLayerBackPropagation*>(next_layer_back_propagation);

        calculate_hidden_delta_perceptron(next_perceptron_layer_forward_propagation,
                                          next_perceptron_layer_back_propagation,
                                          perceptron_layer_back_propagation);
    }
        break;

    case Type::Probabilistic:
    {
        ProbabilisticLayerForwardPropagation* next_probabilistic_layer_forward_propagation =
                static_cast<ProbabilisticLayerForwardPropagation*>(next_layer_forward_propagation);

        ProbabilisticLayerBackPropagation* next_probabilistic_layer_back_propagation =
                static_cast<ProbabilisticLayerBackPropagation*>(next_layer_back_propagation);

        calculate_hidden_delta_probabilistic(next_probabilistic_layer_forward_propagation,
                                             next_probabilistic_layer_back_propagation,
                                             perceptron_layer_back_propagation);
    }
        break;

    default: return;
    }
}


void PerceptronLayer::calculate_hidden_delta_perceptron(PerceptronLayerForwardPropagation* next_forward_propagation,
                                                        PerceptronLayerBackPropagation* next_back_propagation,
                                                        PerceptronLayerBackPropagation* back_propagation) const
{
    const Tensor<type, 2>& next_synaptic_weights = static_cast<PerceptronLayer*>(next_back_propagation->layer_pointer)->get_synaptic_weights();

    back_propagation->delta.device(*thread_pool_device) =
            (next_back_propagation->delta*next_forward_propagation->activations_derivatives).contract(next_synaptic_weights, A_BT);
}
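
// That is, the delta of this layer is obtained from the next perceptron layer as
//
//     delta_this = (delta_next .* activations_derivatives_next) * W_next^T
//
// where .* denotes the element-wise product and W_next holds the synaptic weights of the next layer.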


void PerceptronLayer::calculate_hidden_delta_probabilistic(ProbabilisticLayerForwardPropagation* next_forward_propagation,
                                                           ProbabilisticLayerBackPropagation* next_back_propagation,
                                                           PerceptronLayerBackPropagation* back_propagation) const
{
    const ProbabilisticLayer* probabilistic_layer_pointer = static_cast<ProbabilisticLayer*>(next_back_propagation->layer_pointer);

    const Tensor<type, 2>& next_synaptic_weights = probabilistic_layer_pointer->get_synaptic_weights();

    const Index batch_samples_number = back_propagation->batch_samples_number;

    const Index next_neurons_number = probabilistic_layer_pointer->get_biases_number();

    if(probabilistic_layer_pointer->get_neurons_number() == 1) // Binary
    {
        TensorMap< Tensor<type, 2> > activations_derivatives_2d(next_forward_propagation->activations_derivatives.data(),
                                                                batch_samples_number, next_neurons_number);

        back_propagation->delta.device(*thread_pool_device) =
                (next_back_propagation->delta*activations_derivatives_2d.reshape(Eigen::array<Index,2> {{activations_derivatives_2d.dimension(0),1}})).contract(next_synaptic_weights, A_BT);
    }
    else // Multiple
    {
        if(probabilistic_layer_pointer->get_activation_function() != ProbabilisticLayer::ActivationFunction::Softmax)
        {
            back_propagation->delta.device(*thread_pool_device) =
                    (next_back_propagation->delta*next_forward_propagation->activations_derivatives.reshape(Eigen::array<Index,2> {{next_forward_propagation->activations_derivatives.dimension(0),1}})).contract(next_synaptic_weights, A_BT);
        }
        else
        {
            const Index samples_number = next_back_propagation->delta.dimension(0);
            const Index outputs_number = next_back_propagation->delta.dimension(1);
            const Index next_layer_neurons_number = probabilistic_layer_pointer->get_neurons_number();

            if(outputs_number != next_layer_neurons_number)
            {
                ostringstream buffer;

                buffer << "OpenNN Exception: ProbabilisticLayer class.\n"
                       << "void calculate_hidden_delta_probabilistic(ProbabilisticLayerForwardPropagation*,ProbabilisticLayerBackPropagation*,PerceptronLayerBackPropagation*) const.\n"
                       << "Number of columns in delta (" << outputs_number << ") must be equal to number of neurons in probabilistic layer (" << next_layer_neurons_number << ").\n";

                throw logic_error(buffer.str());
            }

            if(next_forward_propagation->activations_derivatives.dimension(1) != next_layer_neurons_number)
            {
                ostringstream buffer;

                buffer << "OpenNN Exception: ProbabilisticLayer class.\n"
                       << "void calculate_hidden_delta_probabilistic(ProbabilisticLayerForwardPropagation*,ProbabilisticLayerBackPropagation*,PerceptronLayerBackPropagation*) const.\n"
                       << "Dimension 1 of activations derivatives (" << outputs_number << ") must be equal to number of neurons in probabilistic layer (" << next_layer_neurons_number << ").\n";

                throw logic_error(buffer.str());
            }

            if(next_forward_propagation->activations_derivatives.dimension(2) != next_layer_neurons_number)
            {
                ostringstream buffer;

                buffer << "OpenNN Exception: ProbabilisticLayer class.\n"
                       << "void calculate_hidden_delta_probabilistic(ProbabilisticLayerForwardPropagation*,ProbabilisticLayerBackPropagation*,PerceptronLayerBackPropagation*) const.\n"
                       << "Dimension 2 of activations derivatives (" << outputs_number << ") must be equal to number of neurons in probabilistic layer (" << next_layer_neurons_number << ").\n";

                throw logic_error(buffer.str());
            }

            const Index step = next_layer_neurons_number*next_layer_neurons_number;

            for(Index i = 0; i < samples_number; i++)
            {
                next_back_propagation->delta_row = next_back_propagation->delta.chip(i,0);

                TensorMap< Tensor<type, 2> > activations_derivatives_matrix(next_forward_propagation->activations_derivatives.data() + i*step,
                                                                            next_layer_neurons_number, next_layer_neurons_number);

                next_back_propagation->error_combinations_derivatives.chip(i,0) =
                        next_back_propagation->delta_row.contract(activations_derivatives_matrix, AT_B);
            }

            back_propagation->delta.device(*thread_pool_device) =
                    next_back_propagation->error_combinations_derivatives.contract(next_synaptic_weights, A_BT);
        }
    }
}


void PerceptronLayer::calculate_hidden_delta_lm(LayerForwardPropagation* next_layer_forward_propagation,
                                                LayerBackPropagationLM* next_layer_back_propagation,
                                                LayerBackPropagationLM* layer_back_propagation) const
{
    PerceptronLayerBackPropagationLM* perceptron_layer_back_propagation =
            static_cast<PerceptronLayerBackPropagationLM*>(layer_back_propagation);

    switch(next_layer_back_propagation->layer_pointer->get_type())
    {
    case Type::Perceptron:
    {
        PerceptronLayerForwardPropagation* next_perceptron_layer_forward_propagation =
                static_cast<PerceptronLayerForwardPropagation*>(next_layer_forward_propagation);

        PerceptronLayerBackPropagationLM* next_perceptron_layer_back_propagation =
                static_cast<PerceptronLayerBackPropagationLM*>(next_layer_back_propagation);

        calculate_hidden_delta_perceptron_lm(next_perceptron_layer_forward_propagation,
                                             next_perceptron_layer_back_propagation,
                                             perceptron_layer_back_propagation);
    }
        break;

    case Type::Probabilistic:
    {
        ProbabilisticLayerForwardPropagation* next_probabilistic_layer_forward_propagation =
                static_cast<ProbabilisticLayerForwardPropagation*>(next_layer_forward_propagation);

        ProbabilisticLayerBackPropagationLM* next_probabilistic_layer_back_propagation =
                static_cast<ProbabilisticLayerBackPropagationLM*>(next_layer_back_propagation);

        calculate_hidden_delta_probabilistic_lm(next_probabilistic_layer_forward_propagation,
                                                next_probabilistic_layer_back_propagation,
                                                perceptron_layer_back_propagation);
    }
        break;

    default: return;
    }
}


void PerceptronLayer::calculate_hidden_delta_perceptron_lm(PerceptronLayerForwardPropagation* next_forward_propagation,
                                                           PerceptronLayerBackPropagationLM* next_back_propagation,
                                                           PerceptronLayerBackPropagationLM* back_propagation) const
{
    const Tensor<type, 2>& next_synaptic_weights = static_cast<PerceptronLayer*>(next_back_propagation->layer_pointer)->get_synaptic_weights();

    back_propagation->delta.device(*thread_pool_device) =
            (next_back_propagation->delta*next_forward_propagation->activations_derivatives.reshape(Eigen::array<Index,2> {{next_forward_propagation->activations_derivatives.dimension(0),1}})).contract(next_synaptic_weights, A_BT);
}


void PerceptronLayer::calculate_hidden_delta_probabilistic_lm(ProbabilisticLayerForwardPropagation* next_forward_propagation,
                                                              ProbabilisticLayerBackPropagationLM* next_back_propagation,
                                                              PerceptronLayerBackPropagationLM* back_propagation) const
{
    const ProbabilisticLayer* probabilistic_layer_pointer = static_cast<ProbabilisticLayer*>(next_back_propagation->layer_pointer);

    const Tensor<type, 2>& next_synaptic_weights = probabilistic_layer_pointer->get_synaptic_weights();

    if(probabilistic_layer_pointer->get_activation_function() == ProbabilisticLayer::ActivationFunction::Softmax)
    {
        const Index samples_number = next_back_propagation->delta.dimension(0);
        const Index outputs_number = next_back_propagation->delta.dimension(1);
        const Index next_layer_neurons_number = probabilistic_layer_pointer->get_neurons_number();

        if(outputs_number != next_layer_neurons_number)
        {
            ostringstream buffer;

            buffer << "OpenNN Exception: ProbabilisticLayer class.\n"
                   << "void calculate_hidden_delta_probabilistic(ProbabilisticLayerForwardPropagation*,ProbabilisticLayerBackPropagationLM*,PerceptronLayerBackPropagationLM*) const.\n"
                   << "Number of columns in delta (" << outputs_number << ") must be equal to number of neurons in probabilistic layer (" << next_layer_neurons_number << ").\n";

            throw logic_error(buffer.str());
        }

        if(next_forward_propagation->activations_derivatives.dimension(1) != next_layer_neurons_number)
        {
            ostringstream buffer;

            buffer << "OpenNN Exception: ProbabilisticLayer class.\n"
                   << "void calculate_hidden_delta_probabilistic(ProbabilisticLayerForwardPropagation*,ProbabilisticLayerBackPropagationLM*,PerceptronLayerBackPropagationLM*) const.\n"
                   << "Dimension 1 of activations derivatives (" << outputs_number << ") must be equal to number of neurons in probabilistic layer (" << next_layer_neurons_number << ").\n";

            throw logic_error(buffer.str());
        }

        if(next_forward_propagation->activations_derivatives.dimension(2) != next_layer_neurons_number)
        {
            ostringstream buffer;

            buffer << "OpenNN Exception: ProbabilisticLayer class.\n"
                   << "void calculate_hidden_delta_probabilistic(ProbabilisticLayerForwardPropagation*,ProbabilisticLayerBackPropagationLM*,PerceptronLayerBackPropagationLM*) const.\n"
                   << "Dimension 2 of activations derivatives (" << outputs_number << ") must be equal to number of neurons in probabilistic layer (" << next_layer_neurons_number << ").\n";

            throw logic_error(buffer.str());
        }

        const Index step = next_layer_neurons_number*next_layer_neurons_number;

        for(Index i = 0; i < samples_number; i++)
        {
            next_back_propagation->delta_row = next_back_propagation->delta.chip(i,0);

            TensorMap< Tensor<type, 2> > activations_derivatives_matrix(next_forward_propagation->activations_derivatives.data() + i*step,
                                                                        next_layer_neurons_number, next_layer_neurons_number);

            next_back_propagation->error_combinations_derivatives.chip(i,0) =
                    next_back_propagation->delta_row.contract(activations_derivatives_matrix, AT_B);
        }

        back_propagation->delta.device(*thread_pool_device) =
                (next_back_propagation->error_combinations_derivatives).contract(next_synaptic_weights, A_BT);
    }
    else
    {
        back_propagation->delta.device(*thread_pool_device) =
                (next_back_propagation->delta*next_forward_propagation->activations_derivatives.reshape(Eigen::array<Index,2> {{next_forward_propagation->activations_derivatives.dimension(0),1}})).contract(next_synaptic_weights, A_BT);
    }
}


void PerceptronLayer::calculate_squared_errors_Jacobian_lm(const Tensor<type, 2>& inputs,
                                                           LayerForwardPropagation* forward_propagation,
                                                           LayerBackPropagationLM* back_propagation)
{
    PerceptronLayerForwardPropagation* perceptron_layer_forward_propagation =
            static_cast<PerceptronLayerForwardPropagation*>(forward_propagation);

    PerceptronLayerBackPropagationLM* perceptron_layer_back_propagation_lm =
            static_cast<PerceptronLayerBackPropagationLM*>(back_propagation);

    const Index samples_number = inputs.dimension(0);

    const Index inputs_number = get_inputs_number();
    const Index neurons_number = get_neurons_number();

    Index parameter_index = 0;

    for(Index sample = 0; sample < samples_number; sample++)
    {
        parameter_index = 0;

        for(Index neuron = 0; neuron < neurons_number; neuron++)
        {
            for(Index input = 0; input < inputs_number; input++)
            {
                perceptron_layer_back_propagation_lm->squared_errors_Jacobian(sample, neurons_number + parameter_index) =
                        perceptron_layer_back_propagation_lm->delta(sample, neuron) *
                        perceptron_layer_forward_propagation->activations_derivatives(sample, neuron) *
                        inputs(sample, input);

                parameter_index++;
            }

            perceptron_layer_back_propagation_lm->squared_errors_Jacobian(sample, neuron) =
                    perceptron_layer_back_propagation_lm->delta(sample, neuron) *
                    perceptron_layer_forward_propagation->activations_derivatives(sample, neuron);
        }
    }
}
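
// Each row of squared_errors_Jacobian corresponds to one sample and each column to one layer parameter:
// columns 0 .. neurons_number-1 hold the derivatives with respect to the biases, and the following
// inputs_number*neurons_number columns hold the derivatives with respect to the synaptic weights,
// neuron by neuron, matching the parameter ordering used by get_parameters().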


void PerceptronLayer::insert_squared_errors_Jacobian_lm(LayerBackPropagationLM* back_propagation,
                                                        const Index& index,
                                                        Tensor<type, 2>& squared_errors_Jacobian) const
{
    PerceptronLayerBackPropagationLM* perceptron_layer_back_propagation_lm =
            static_cast<PerceptronLayerBackPropagationLM*>(back_propagation);

    const Index batch_samples_number = perceptron_layer_back_propagation_lm->squared_errors_Jacobian.dimension(0);
    const Index layer_parameters_number = get_parameters_number();

    memcpy(squared_errors_Jacobian.data() + index,
           perceptron_layer_back_propagation_lm->squared_errors_Jacobian.data(),
           static_cast<size_t>(layer_parameters_number*batch_samples_number)*sizeof(type));
}


void PerceptronLayer::calculate_error_gradient(const Tensor<type, 2>& inputs,
                                               LayerForwardPropagation* forward_propagation,
                                               LayerBackPropagation* back_propagation) const
{
    PerceptronLayerForwardPropagation* perceptron_layer_forward_propagation =
            static_cast<PerceptronLayerForwardPropagation*>(forward_propagation);

    PerceptronLayerBackPropagation* perceptron_layer_back_propagation =
            static_cast<PerceptronLayerBackPropagation*>(back_propagation);

    perceptron_layer_back_propagation->biases_derivatives.device(*thread_pool_device) =
            (perceptron_layer_back_propagation->delta*perceptron_layer_forward_propagation->activations_derivatives).sum(Eigen::array<Index, 1>({0}));

    perceptron_layer_back_propagation->synaptic_weights_derivatives.device(*thread_pool_device) =
            inputs.contract(perceptron_layer_back_propagation->delta*perceptron_layer_forward_propagation->activations_derivatives, AT_B);
}
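
// In index notation, the derivatives computed above are
//
//     d_error/d_bias(j)      = sum over samples s of  delta(s, j) * activations_derivatives(s, j)
//     d_error/d_weight(i, j) = sum over samples s of  inputs(s, i) * delta(s, j) * activations_derivatives(s, j)
//
// which is what the column-wise sum and the AT_B contraction (inputs transposed) compute.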


void PerceptronLayer::insert_gradient(LayerBackPropagation* back_propagation,
                                      const Index& index,
                                      Tensor<type, 1>& gradient) const
{
    PerceptronLayerBackPropagation* perceptron_layer_back_propagation =
            static_cast<PerceptronLayerBackPropagation*>(back_propagation);

    const Index biases_number = get_biases_number();
    const Index synaptic_weights_number = get_synaptic_weights_number();

    memcpy(gradient.data() + index,
           perceptron_layer_back_propagation->biases_derivatives.data(),
           static_cast<size_t>(biases_number)*sizeof(type));

    memcpy(gradient.data() + index + biases_number,
           perceptron_layer_back_propagation->synaptic_weights_derivatives.data(),
           static_cast<size_t>(synaptic_weights_number)*sizeof(type));
}


/// Returns a string with the mathematical expression represented by the layer, written in terms of the given
/// input and output variable names.

string PerceptronLayer::write_expression(const Tensor<string, 1>& inputs_names, const Tensor<string, 1>& outputs_names) const
{
#ifdef OPENNN_DEBUG
    // check_size(inputs_names, get_inputs_number(), LOG);
    // check_size(outputs_names, get_neurons_number(), LOG);
#endif

    ostringstream buffer;

    for(Index j = 0; j < outputs_names.size(); j++)
    {
        const Tensor<type, 1> synaptic_weights_column = synaptic_weights.chip(j, 1);

        buffer << outputs_names[j] << " = " << write_activation_function_expression() << "( " << biases(0, j) << " +";

        for(Index i = 0; i < inputs_names.size() - 1; i++)
        {
            buffer << " (" << inputs_names[i] << "*" << synaptic_weights_column(i) << ") +";
        }

        buffer << " (" << inputs_names[inputs_names.size() - 1] << "*" << synaptic_weights_column(inputs_names.size() - 1) << ") );\n";
    }

    return buffer.str();
}
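
// For instance, for a layer with two inputs, one neuron and a hyperbolic tangent activation, the
// returned expression looks like (numbers made up):
//
//     y = tanh( 0.5 + (x1*1.2) + (x2*-0.7) );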


void PerceptronLayer::from_XML(const tinyxml2::XMLDocument& document)
{
    ostringstream buffer;

    // Perceptron layer

    const tinyxml2::XMLElement* perceptron_layer_element = document.FirstChildElement("PerceptronLayer");

    if(!perceptron_layer_element)
    {
        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
               << "PerceptronLayer element is nullptr.\n";

        throw logic_error(buffer.str());
    }

    // Layer name

    const tinyxml2::XMLElement* layer_name_element = perceptron_layer_element->FirstChildElement("LayerName");

    if(!layer_name_element)
    {
        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
               << "LayerName element is nullptr.\n";

        throw logic_error(buffer.str());
    }

    if(layer_name_element->GetText())
    {
        set_name(layer_name_element->GetText());
    }

    // Inputs number

    const tinyxml2::XMLElement* inputs_number_element = perceptron_layer_element->FirstChildElement("InputsNumber");

    if(!inputs_number_element)
    {
        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
               << "InputsNumber element is nullptr.\n";

        throw logic_error(buffer.str());
    }

    if(inputs_number_element->GetText())
    {
        set_inputs_number(static_cast<Index>(stoi(inputs_number_element->GetText())));
    }

    // Neurons number

    const tinyxml2::XMLElement* neurons_number_element = perceptron_layer_element->FirstChildElement("NeuronsNumber");

    if(!neurons_number_element)
    {
        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
               << "NeuronsNumber element is nullptr.\n";

        throw logic_error(buffer.str());
    }

    if(neurons_number_element->GetText())
    {
        set_neurons_number(static_cast<Index>(stoi(neurons_number_element->GetText())));
    }

    // Activation function

    const tinyxml2::XMLElement* activation_function_element = perceptron_layer_element->FirstChildElement("ActivationFunction");

    if(!activation_function_element)
    {
        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
               << "ActivationFunction element is nullptr.\n";

        throw logic_error(buffer.str());
    }

    if(activation_function_element->GetText())
    {
        set_activation_function(activation_function_element->GetText());
    }

    // Parameters

    const tinyxml2::XMLElement* parameters_element = perceptron_layer_element->FirstChildElement("Parameters");

    if(!parameters_element)
    {
        buffer << "OpenNN Exception: PerceptronLayer class.\n"
               << "void from_XML(const tinyxml2::XMLDocument&) method.\n"
               << "Parameters element is nullptr.\n";

        throw logic_error(buffer.str());
    }

    if(parameters_element->GetText())
    {
        const string parameters_string = parameters_element->GetText();

        set_parameters(to_type_vector(parameters_string, ' '));
    }
}


void PerceptronLayer::write_XML(tinyxml2::XMLPrinter& file_stream) const
{
    ostringstream buffer;

    // Perceptron layer

    file_stream.OpenElement("PerceptronLayer");

    // Layer name

    file_stream.OpenElement("LayerName");

    buffer.str("");
    buffer << layer_name;

    file_stream.PushText(buffer.str().c_str());

    file_stream.CloseElement();

    // Inputs number

    file_stream.OpenElement("InputsNumber");

    buffer.str("");
    buffer << get_inputs_number();

    file_stream.PushText(buffer.str().c_str());

    file_stream.CloseElement();

    // Neurons number

    file_stream.OpenElement("NeuronsNumber");

    buffer.str("");
    buffer << get_neurons_number();

    file_stream.PushText(buffer.str().c_str());

    file_stream.CloseElement();

    // Activation function

    file_stream.OpenElement("ActivationFunction");

    file_stream.PushText(write_activation_function().c_str());

    file_stream.CloseElement();

    // Parameters

    file_stream.OpenElement("Parameters");

    buffer.str("");

    const Tensor<type, 1> parameters = get_parameters();
    const Index parameters_size = parameters.size();

    for(Index i = 0; i < parameters_size; i++)
    {
        buffer << parameters(i);

        if(i != (parameters_size - 1)) buffer << " ";
    }

    file_stream.PushText(buffer.str().c_str());

    file_stream.CloseElement();

    // Perceptron layer (end tag)

    file_stream.CloseElement();
}
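
// The element written above has the following layout (values shortened):
//
//     <PerceptronLayer>
//         <LayerName>perceptron_layer</LayerName>
//         <InputsNumber>2</InputsNumber>
//         <NeuronsNumber>3</NeuronsNumber>
//         <ActivationFunction>HyperbolicTangent</ActivationFunction>
//         <Parameters>0.1 0.2 ...</Parameters>
//     </PerceptronLayer>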


string PerceptronLayer::write_activation_function_expression() const
{
    switch(activation_function)
    {
    case ActivationFunction::Threshold:
        return "threshold";

    case ActivationFunction::SymmetricThreshold:
        return "symmetric_threshold";

    case ActivationFunction::Logistic:
        return "logistic";

    case ActivationFunction::HyperbolicTangent:
        return "tanh";

    case ActivationFunction::Linear:
        return string();

    case ActivationFunction::RectifiedLinear:
        return "ReLU";

    case ActivationFunction::ExponentialLinear:
        return "ELU";

    case ActivationFunction::ScaledExponentialLinear:
        return "SELU";

    case ActivationFunction::SoftPlus:
        return "soft_plus";

    case ActivationFunction::SoftSign:
        return "soft_sign";

    case ActivationFunction::HardSigmoid:
        return "hard_sigmoid";
    }

    return string();
}


string PerceptronLayer::write_combinations_c() const
{
    ostringstream buffer;

    const Index inputs_number = get_inputs_number();
    const Index neurons_number = get_neurons_number();

    buffer << "\tvector<float> combinations(" << neurons_number << ");\n" << endl;

    for(Index i = 0; i < neurons_number; i++)
    {
        buffer << "\tcombinations[" << i << "] = " << biases(i);

        for(Index j = 0; j < inputs_number; j++)
        {
            buffer << " +" << synaptic_weights(j, i) << "*inputs[" << j << "]";
        }

        buffer << ";" << endl;
    }

    return buffer.str();
}
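
// For a layer with two inputs and one neuron, the generated fragment looks like (numbers made up):
//
//     vector<float> combinations(1);
//
//     combinations[0] = 0.5 +1.2*inputs[0] +-0.7*inputs[1];
//
// Note the consecutive "+ -" signs when a weight is negative; the expression is still valid C/C++.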


string PerceptronLayer::write_activations_c() const
{
    ostringstream buffer;

    const Index neurons_number = get_neurons_number();

    buffer << "\n\tvector<float> activations(" << neurons_number << ");\n" << endl;

    for(Index i = 0; i < neurons_number; i++)
    {
        buffer << "\tactivations[" << i << "] = ";

        switch(activation_function)
        {
        case ActivationFunction::HyperbolicTangent:
            buffer << "tanh(combinations[" << i << "]);\n";
            break;

        case ActivationFunction::RectifiedLinear:
            buffer << "combinations[" << i << "] < 0.0 ? 0.0 : combinations[" << i << "];\n";
            break;

        case ActivationFunction::Logistic:
            buffer << "1.0/(1.0 + exp(-combinations[" << i << "]));\n";
            break;

        case ActivationFunction::Threshold:
            buffer << "combinations[" << i << "] >= 0.0 ? 1.0 : 0.0;\n";
            break;

        case ActivationFunction::SymmetricThreshold:
            buffer << "combinations[" << i << "] >= 0.0 ? 1.0 : -1.0;\n";
            break;

        case ActivationFunction::Linear:
            buffer << "combinations[" << i << "];\n";
            break;

        case ActivationFunction::ScaledExponentialLinear:
            buffer << "combinations[" << i << "] < 0.0 ? 1.0507*1.67326*(exp(combinations[" << i << "]) - 1.0) : 1.0507*combinations[" << i << "];\n";
            break;

        case ActivationFunction::SoftPlus:
            buffer << "log(1.0 + exp(combinations[" << i << "]));\n";
            break;

        case ActivationFunction::SoftSign:
            buffer << "combinations[" << i << "] < 0.0 ? combinations[" << i << "]/(1.0 - combinations[" << i << "] ) : combinations[" << i << "]/(1.0 + combinations[" << i << "] );\n";
            break;

        case ActivationFunction::ExponentialLinear:
            buffer << "combinations[" << i << "] < 0.0 ? 1.0*(exp(combinations[" << i << "]) - 1.0) : combinations[" << i << "];\n";
            break;

        case ActivationFunction::HardSigmoid:
            // Hard sigmoid: clamp(0.2*x + 0.5, 0, 1)
            buffer << "combinations[" << i << "] < -2.5 ? 0.0 : (combinations[" << i << "] > 2.5 ? 1.0 : 0.2*combinations[" << i << "] + 0.5);\n";
            break;
        }
    }

    return buffer.str();
}


string PerceptronLayer::write_combinations_python() const
{
    ostringstream buffer;

    const Index inputs_number = get_inputs_number();
    const Index neurons_number = get_neurons_number();

    buffer << "\t\tcombinations = [None] * " << neurons_number << "\n" << endl;

    for(Index i = 0; i < neurons_number; i++)
    {
        buffer << "\t\tcombinations[" << i << "] = " << biases(i);

        for(Index j = 0; j < inputs_number; j++)
        {
            buffer << " +" << synaptic_weights(j, i) << "*inputs[" << j << "]";
        }

        buffer << " " << endl;
    }

    buffer << "\t\t" << endl;

    return buffer.str();
}


string PerceptronLayer::write_activations_python() const
{
    ostringstream buffer;

    const Index neurons_number = get_neurons_number();

    buffer << "\t\tactivations = [None] * " << neurons_number << "\n" << endl;

    for(Index i = 0; i < neurons_number; i++)
    {
        buffer << "\t\tactivations[" << i << "] = ";

        switch(activation_function)
        {
        case ActivationFunction::HyperbolicTangent:
            buffer << "np.tanh(combinations[" << i << "])\n";
            break;

        case ActivationFunction::RectifiedLinear:
            buffer << "np.maximum(0.0, combinations[" << i << "])\n";
            break;

        case ActivationFunction::Logistic:
            buffer << "1.0/(1.0 + np.exp(-combinations[" << i << "]))\n";
            break;

        case ActivationFunction::Threshold:
            buffer << "1.0 if combinations[" << i << "] >= 0.0 else 0.0\n";
            break;

        case ActivationFunction::SymmetricThreshold:
            buffer << "1.0 if combinations[" << i << "] >= 0.0 else -1.0\n";
            break;

        case ActivationFunction::Linear:
            buffer << "combinations[" << i << "]\n";
            break;

        case ActivationFunction::ScaledExponentialLinear:
            buffer << "1.0507*1.67326*(np.exp(combinations[" << i << "]) - 1.0) if combinations[" << i << "] < 0.0 else 1.0507*combinations[" << i << "]\n";
            break;

        case ActivationFunction::SoftPlus:
            buffer << "np.log(1.0 + np.exp(combinations[" << i << "]))\n";
            break;

        case ActivationFunction::SoftSign:
            buffer << "combinations[" << i << "]/(1.0 - combinations[" << i << "] ) if combinations[" << i << "] < 0.0 else combinations[" << i << "]/(1.0 + combinations[" << i << "] )\n";
            break;

        case ActivationFunction::ExponentialLinear:
            buffer << "1.0*(np.exp(combinations[" << i << "]) - 1.0) if combinations[" << i << "] < 0.0 else combinations[" << i << "]\n";
            break;

        case ActivationFunction::HardSigmoid:
            // Hard sigmoid: clip(0.2*x + 0.5, 0, 1)
            buffer << "np.clip(0.2*combinations[" << i << "] + 0.5, 0.0, 1.0)\n";
            break;
        }
    }

    return buffer.str();
}


string PerceptronLayer::write_expression_c() const
{
    ostringstream buffer;

    buffer << "vector<float> " << layer_name << "(const vector<float>& inputs)\n{" << endl;

    buffer << write_combinations_c();

    buffer << write_activations_c();

    buffer << "\n\treturn activations;\n}" << endl;

    return buffer.str();
}


string PerceptronLayer::write_expression_python() const
{
    ostringstream buffer;

    buffer << "\tdef " << layer_name << "(self,inputs):\n" << endl;

    buffer << write_combinations_python();

    buffer << write_activations_python();

    buffer << "\n\t\treturn activations;\n" << endl;

    return buffer.str();
}

}

// OpenNN: Open Neural Networks Library.
// Copyright(C) 2005-2021 Artificial Intelligence Techniques, SL.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.

// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA