response_optimization.cpp
// OpenNN: Open Neural Networks Library
// www.opennn.net
//
// R E S P O N S E   O P T I M I Z A T I O N   C L A S S
//
// Artificial Intelligence Techniques SL
// artelnics@artelnics.com

#include "response_optimization.h"

namespace OpenNN
{

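/// Default constructor.
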
ResponseOptimization::ResponseOptimization()
{
}

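/// Neural network constructor.
/// It creates a response optimization object associated with the given neural network, and
/// initializes the conditions and bounds from its scaling and bounding layers.
/// @param new_neural_network_pointer Pointer to the neural network whose response is optimized.
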
ResponseOptimization::ResponseOptimization(NeuralNetwork* new_neural_network_pointer)
{
    neural_network_pointer = new_neural_network_pointer;

    const Index inputs_number = neural_network_pointer->get_inputs_number();
    const Index outputs_number = neural_network_pointer->get_outputs_number();

    inputs_conditions.resize(inputs_number);
    inputs_conditions.setConstant(Condition::Between);

    outputs_conditions.resize(outputs_number);
    outputs_conditions.setConstant(Condition::Minimum);

    inputs_minimums = neural_network_pointer->get_scaling_layer_pointer()->get_minimums();
    inputs_maximums = neural_network_pointer->get_scaling_layer_pointer()->get_maximums();

    outputs_minimums = neural_network_pointer->get_bounding_layer_pointer()->get_lower_bounds();
    outputs_maximums = neural_network_pointer->get_bounding_layer_pointer()->get_upper_bounds();
}

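/// Destructor.
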
ResponseOptimization::~ResponseOptimization()
{
}

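/// Sets the number of random input combinations that will be evaluated during the optimization.
/// @param new_evaluations_number Number of evaluations.
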
void ResponseOptimization::set_evaluations_number(const Index& new_evaluations_number)
{
    evaluations_number = new_evaluations_number;
}

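/// Returns the conditions imposed on the input variables.
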
Tensor<ResponseOptimization::Condition, 1> ResponseOptimization::get_inputs_conditions()
{
    return inputs_conditions;
}

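/// Returns the conditions imposed on the output variables.
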
Tensor<ResponseOptimization::Condition, 1> ResponseOptimization::get_outputs_conditions()
{
    return outputs_conditions;
}

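/// Returns the minimum values allowed for the input variables.
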
Tensor<type, 1> ResponseOptimization::get_inputs_minimums()
{
    return inputs_minimums;
}

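/// Returns the maximum values allowed for the input variables.
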
Tensor<type, 1> ResponseOptimization::get_inputs_maximums()
{
    return inputs_maximums;
}

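/// Returns the minimum values allowed for the output variables.
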
Tensor<type, 1> ResponseOptimization::get_outputs_minimums()
{
    return outputs_minimums;
}

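/// Returns the maximum values allowed for the output variables.
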
Tensor<type, 1> ResponseOptimization::get_outputs_maximums()
{
    return outputs_maximums;
}

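/// Sets the condition of a single input variable, given its name.
/// @param name Name of the input variable.
/// @param condition Condition to be imposed on that variable.
/// @param values Values associated with the condition (empty for Minimum and Maximum).
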
void ResponseOptimization::set_input_condition(const string& name,
                                               const ResponseOptimization::Condition& condition,
                                               const Tensor<type, 1>& values)
{
    const Index index = neural_network_pointer->get_input_index(name);

    set_input_condition(index, condition, values);
}

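/// Sets the condition of a single output variable, given its name.
/// @param name Name of the output variable.
/// @param condition Condition to be imposed on that variable.
/// @param values Values associated with the condition (empty for Minimum and Maximum).
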
void ResponseOptimization::set_output_condition(const string& name, const ResponseOptimization::Condition& condition, const Tensor<type, 1>& values)
{
    const Index index = neural_network_pointer->get_output_index(name);

    set_output_condition(index, condition, values);
}

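/// Sets the condition of a single input variable, given its index.
/// It checks that the number of values matches the condition and updates the input minimums and maximums accordingly.
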
void ResponseOptimization::set_input_condition(const Index& index, const ResponseOptimization::Condition& condition, const Tensor<type, 1>& values)
{
    inputs_conditions[index] = condition;

    ostringstream buffer;

    switch(condition)
    {
    case Condition::Minimum:

        if(values.size() != 0)
        {
            buffer << "OpenNN Exception: ResponseOptimization class.\n"
                   << "void set_input_condition() method.\n"
                   << "For Minimum condition, size of values must be 0.\n";

            throw logic_error(buffer.str());
        }

        return;

    case Condition::Maximum:

        if(values.size() != 0)
        {
            buffer << "OpenNN Exception: ResponseOptimization class.\n"
                   << "void set_input_condition() method.\n"
                   << "For Maximum condition, size of values must be 0.\n";

            throw logic_error(buffer.str());
        }

        return;

    case Condition::EqualTo:

        if(values.size() != 1)
        {
            buffer << "OpenNN Exception: ResponseOptimization class.\n"
                   << "void set_input_condition() method.\n"
                   << "For EqualTo condition, size of values must be 1.\n";

            throw logic_error(buffer.str());
        }

        inputs_minimums[index] = values[0];
        inputs_maximums[index] = values[0];

        return;

    case Condition::LessEqualTo:

        if(values.size() != 1)
        {
            buffer << "OpenNN Exception: ResponseOptimization class.\n"
                   << "void set_input_condition() method.\n"
                   << "For LessEqualTo condition, size of values must be 1.\n";

            throw logic_error(buffer.str());
        }

        inputs_maximums[index] = values[0];

        return;

    case Condition::GreaterEqualTo:

        if(values.size() != 1)
        {
            buffer << "OpenNN Exception: ResponseOptimization class.\n"
                   << "void set_input_condition() method.\n"
                   << "For GreaterEqualTo condition, size of values must be 1.\n";

            throw logic_error(buffer.str());
        }

        inputs_minimums[index] = values[0];

        return;

    case Condition::Between:

        if(values.size() != 2)
        {
            buffer << "OpenNN Exception: ResponseOptimization class.\n"
                   << "void set_input_condition() method.\n"
                   << "For Between condition, size of values must be 2.\n";

            throw logic_error(buffer.str());
        }

        inputs_minimums[index] = values[0];
        inputs_maximums[index] = values[1];

        return;
    }
}

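/// Sets the condition of a single output variable, given its index.
/// It checks that the number of values matches the condition and updates the output minimums and maximums accordingly.
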
void ResponseOptimization::set_output_condition(const Index& index, const ResponseOptimization::Condition& condition, const Tensor<type, 1>& values)
{
    outputs_conditions[index] = condition;

    ostringstream buffer;

    switch(condition)
    {
    case Condition::Minimum:

        if(values.size() != 0)
        {
            buffer << "OpenNN Exception: ResponseOptimization class.\n"
                   << "void set_output_condition() method.\n"
                   << "For Minimum condition, size of values must be 0.\n";

            throw logic_error(buffer.str());
        }

        return;

    case Condition::Maximum:

        if(values.size() != 0)
        {
            buffer << "OpenNN Exception: ResponseOptimization class.\n"
                   << "void set_output_condition() method.\n"
                   << "For Maximum condition, size of values must be 0.\n";

            throw logic_error(buffer.str());
        }

        return;

    case Condition::EqualTo:

        if(values.size() != 1)
        {
            buffer << "OpenNN Exception: ResponseOptimization class.\n"
                   << "void set_output_condition() method.\n"
                   << "For EqualTo condition, size of values must be 1.\n";

            throw logic_error(buffer.str());
        }

        outputs_minimums[index] = values[0];
        outputs_maximums[index] = values[0];

        return;

    case Condition::LessEqualTo:

        if(values.size() != 1)
        {
            buffer << "OpenNN Exception: ResponseOptimization class.\n"
                   << "void set_output_condition() method.\n"
                   << "For LessEqualTo condition, size of values must be 1.\n";

            throw logic_error(buffer.str());
        }

        outputs_maximums[index] = values[0];

        return;

    case Condition::GreaterEqualTo:

        if(values.size() != 1)
        {
            buffer << "OpenNN Exception: ResponseOptimization class.\n"
                   << "void set_output_condition() method.\n"
                   << "For GreaterEqualTo condition, size of values must be 1.\n";

            throw logic_error(buffer.str());
        }

        outputs_minimums[index] = values[0];

        return;

    case Condition::Between:

        if(values.size() != 2)
        {
            buffer << "OpenNN Exception: ResponseOptimization class.\n"
                   << "void set_output_condition() method.\n"
                   << "For Between condition, size of values must be 2.\n";

            throw logic_error(buffer.str());
        }

        outputs_minimums[index] = values[0];
        outputs_maximums[index] = values[1];

        return;
    }
}

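/// Sets the conditions of several variables at once, given their names.
/// Each name is matched against the inputs of the neural network; names that are not inputs are treated as outputs.
/// @param names Names of the variables whose conditions are set.
/// @param conditions_string Conditions expressed as strings ("Minimize", "Maximize", "=", "Between", "<=", ">=").
/// @param values Values associated with the conditions.
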
void ResponseOptimization::set_inputs_outputs_conditions(const Tensor<string, 1>& names,
                                                         const Tensor<string, 1>& conditions_string,
                                                         const Tensor<type, 1>& values)
{
    const Tensor<Condition, 1> conditions = get_conditions(conditions_string);

    const Tensor<Tensor<type, 1>, 1> values_conditions = get_values_conditions(conditions, values);

    const Index variables_number = conditions_string.size();

    const Tensor<string, 1> inputs_names = neural_network_pointer->get_inputs_names();

    Index index;

    for(Index i = 0; i < variables_number; i++)
    {
        // Determine whether the current name refers to an input variable; otherwise treat it as an output.

        bool is_input = false;

        for(Index j = 0; j < inputs_names.size(); j++)
        {
            if(inputs_names[j] == names[i])
            {
                is_input = true;
                break;
            }
        }

        if(is_input)
        {
            index = neural_network_pointer->get_input_index(names[i]);

            set_input_condition(index, conditions[i], values_conditions[i]);
        }
        else
        {
            index = neural_network_pointer->get_output_index(names[i]);

            set_output_condition(index, conditions[i], values_conditions[i]);
        }
    }
}

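/// Converts a vector of condition strings ("Minimize", "Maximize", "=", "Between", "<=", ">=")
/// into their corresponding Condition enumeration values.
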
Tensor<ResponseOptimization::Condition, 1> ResponseOptimization::get_conditions(const Tensor<string, 1>& conditions_string) const
{
    const Index conditions_size = conditions_string.size();

    Tensor<Condition, 1> conditions(conditions_size);

    for(Index i = 0; i < conditions_size; i++)
    {
        if(conditions_string[i] == "Minimize")
        {
            conditions[i] = Condition::Minimum;
        }
        else if(conditions_string[i] == "Maximize")
        {
            conditions[i] = Condition::Maximum;
        }
        else if(conditions_string[i] == "=")
        {
            conditions[i] = Condition::EqualTo;
        }
        else if(conditions_string[i] == "Between")
        {
            conditions[i] = Condition::Between;
        }
        else if(conditions_string[i] == ">="
             || conditions_string[i] == ">")
        {
            conditions[i] = Condition::GreaterEqualTo;
        }
        else if(conditions_string[i] == "<="
             || conditions_string[i] == "<")
        {
            conditions[i] = Condition::LessEqualTo;
        }
    }

    return conditions;
}

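/// Splits a flat vector of values into one vector per condition: empty for Minimum and Maximum,
/// one value for EqualTo, LessEqualTo and GreaterEqualTo, and two values for Between.
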
Tensor<Tensor<type, 1>, 1> ResponseOptimization::get_values_conditions(const Tensor<ResponseOptimization::Condition, 1>& conditions, const Tensor<type, 1>& values) const
{
    const Index conditions_size = conditions.size();

    Tensor<Tensor<type, 1>, 1> values_conditions(conditions_size);

    Index index = 0;

    ostringstream buffer;

    for(Index i = 0; i < conditions_size; i++)
    {
        Tensor<type, 1> current_values;

        const Condition current_condition = conditions[i];

        switch(current_condition)
        {
        case Condition::Minimum:

            values_conditions[i].resize(0);

            index++;
            break;

        case Condition::Maximum:

            values_conditions[i].resize(0);

            index++;
            break;

        case Condition::EqualTo:

            current_values.resize(1);
            current_values[0] = values[index];
            index++;

            values_conditions[i] = current_values;

            break;

        case Condition::LessEqualTo:

            current_values.resize(1);
            current_values[0] = values[index];
            index++;

            values_conditions[i] = current_values;

            break;

        case Condition::GreaterEqualTo:

            current_values.resize(1);
            current_values[0] = values[index];
            index++;

            values_conditions[i] = current_values;

            break;

        case Condition::Between:

            current_values.resize(2);
            current_values[0] = values[index];
            index++;
            current_values[1] = values[index];
            index++;

            values_conditions[i] = current_values;

            break;
        }
    }

    return values_conditions;
}

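/// Returns a matrix with evaluations_number rows of random inputs, each drawn uniformly
/// between the minimum and maximum of the corresponding input variable.
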
Tensor<type, 2> ResponseOptimization::calculate_inputs() const
{
    const Index inputs_number = neural_network_pointer->get_inputs_number();

    Tensor<type, 2> inputs(evaluations_number, inputs_number);

    for(Index i = 0; i < evaluations_number; i++)
    {
        for(Index j = 0; j < inputs_number; j++)
        {
            inputs(i,j) = calculate_random_uniform(inputs_minimums[j], inputs_maximums[j]);
        }
    }

    return inputs;
}

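/// Joins the inputs and outputs matrices column-wise and discards the rows whose outputs
/// do not satisfy the output minimums and maximums.
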
Tensor<type, 2> ResponseOptimization::calculate_envelope(const Tensor<type, 2>& inputs, const Tensor<type, 2>& outputs) const
{
    const Index inputs_number = neural_network_pointer->get_inputs_number();
    const Index outputs_number = neural_network_pointer->get_outputs_number();

    const Index samples_number = inputs.dimension(0);

    // Assemble the columns of inputs and outputs, keeping only the rows whose outputs lie within their bounds.

    Tensor<type, 2> envelope(samples_number, inputs_number + outputs_number);

    Index rows_number = 0;

    for(Index i = 0; i < samples_number; i++)
    {
        bool is_feasible = true;

        for(Index j = 0; j < outputs_number; j++)
        {
            if(outputs(i,j) < outputs_minimums[j] || outputs(i,j) > outputs_maximums[j]) is_feasible = false;
        }

        if(!is_feasible) continue;

        for(Index j = 0; j < inputs_number; j++) envelope(rows_number, j) = inputs(i,j);
        for(Index j = 0; j < outputs_number; j++) envelope(rows_number, inputs_number+j) = outputs(i,j);

        rows_number++;
    }

    // Return only the feasible rows.

    const Eigen::array<Index, 2> offsets = {0, 0};
    const Eigen::array<Index, 2> extents = {rows_number, inputs_number + outputs_number};

    return envelope.slice(offsets, extents);
}

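/// Performs the response optimization.
/// It evaluates the neural network on random inputs, keeps the samples whose outputs satisfy
/// the conditions, and returns the sample with the best objective value built from the
/// Minimum and Maximum conditions. The caller takes ownership of the returned results object.
///
/// A minimal usage sketch (it assumes a trained NeuralNetwork object named neural_network
/// whose inputs include a variable "x" and whose outputs include a variable "y"):
/// @code
///     ResponseOptimization response_optimization(&neural_network);
///
///     Tensor<type, 1> values(2);
///     values.setValues({type(0), type(1)});
///
///     response_optimization.set_input_condition("x", ResponseOptimization::Condition::Between, values);
///     response_optimization.set_output_condition("y", ResponseOptimization::Condition::Maximum, Tensor<type, 1>());
///
///     ResponseOptimizationResults* results = response_optimization.perform_optimization();
/// @endcode
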
ResponseOptimizationResults* ResponseOptimization::perform_optimization() const
{
    ResponseOptimizationResults* results = new ResponseOptimizationResults(neural_network_pointer);

    const Tensor<type, 2> inputs = calculate_inputs();

    const Tensor<type, 2> outputs = neural_network_pointer->calculate_outputs(inputs);

    const Tensor<type, 2> envelope = calculate_envelope(inputs, outputs);

    const Index samples_number = envelope.dimension(0);

    const Index inputs_number = neural_network_pointer->get_inputs_number();
    const Index outputs_number = neural_network_pointer->get_outputs_number();

    Tensor<type, 1> objective(samples_number);
    objective.setZero();

    for(Index i = 0; i < samples_number; i++)
    {
        for(Index j = 0; j < inputs_number; j++)
        {
            if(inputs_conditions[j] == Condition::Minimum)
            {
                objective[i] += envelope(i,j);
            }
            else if(inputs_conditions[j] == Condition::Maximum)
            {
                objective[i] += -envelope(i,j);
            }
        }

        for(Index j = 0; j < outputs_number; j++)
        {
            if(outputs_conditions[j] == Condition::Minimum)
            {
                objective[i] += envelope(i, inputs_number+j);
            }
            else if(outputs_conditions[j] == Condition::Maximum)
            {
                objective[i] += -envelope(i, inputs_number+j);
            }
        }
    }

    const Index optimal_index = minimal_index(objective);

    results->optimal_variables = envelope.chip(optimal_index, 0);

    results->optimum_objective = objective[optimal_index];

    return results;
}

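/// Returns a random number drawn from a uniform distribution between the given minimum and maximum.
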
type ResponseOptimization::calculate_random_uniform(const type& minimum, const type& maximum) const
{
    const type random = static_cast<type>(rand()/(RAND_MAX+1.0));

    const type random_uniform = minimum + (maximum - minimum) * random;

    return random_uniform;
}

}

// OpenNN: Open Neural Networks Library.
// Copyright(C) 2005-2021 Artificial Intelligence Techniques, SL.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA