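// Flood neural networks library.
// MinkowskiError: objective functional measuring the Minkowski error between the outputs of
// a multilayer perceptron and the targets of an input-target data set.
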
#include "MinkowskiError.h"

#include <string>
#include <sstream>
#include <iostream>
#include <fstream>
#include <cmath>

namespace Flood
{

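/// Default constructor. It creates a Minkowski error objective functional not associated with a
/// multilayer perceptron or an input-target data set, and sets the Minkowski parameter to 1.5.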
MinkowskiError::MinkowskiError(void) : ObjectiveFunctional()
{
   input_target_data_set_pointer = NULL;

   Minkowski_parameter = 1.5;
}


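/// Multilayer perceptron constructor. It creates a Minkowski error objective functional associated
/// with the given multilayer perceptron, and sets the Minkowski parameter to 1.5.
/// @param new_multilayer_perceptron_pointer Pointer to a multilayer perceptron object.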
MinkowskiError::MinkowskiError(MultilayerPerceptron* new_multilayer_perceptron_pointer)
: ObjectiveFunctional(new_multilayer_perceptron_pointer)
{
   input_target_data_set_pointer = NULL;

   Minkowski_parameter = 1.5;
}


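/// Input-target data set constructor. It creates a Minkowski error objective functional associated
/// with the given input-target data set, and sets the Minkowski parameter to 1.5.
/// @param new_input_target_data_set_pointer Pointer to an input-target data set object.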
MinkowskiError::MinkowskiError(InputTargetDataSet* new_input_target_data_set_pointer)
: ObjectiveFunctional()
{
   input_target_data_set_pointer = new_input_target_data_set_pointer;

   Minkowski_parameter = 1.5;
}


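/// Multilayer perceptron and input-target data set constructor. It creates a Minkowski error
/// objective functional associated with both objects, and sets the Minkowski parameter to 1.5.
/// @param new_multilayer_perceptron_pointer Pointer to a multilayer perceptron object.
/// @param new_input_target_data_set_pointer Pointer to an input-target data set object.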
MinkowskiError::MinkowskiError(MultilayerPerceptron* new_multilayer_perceptron_pointer,
InputTargetDataSet* new_input_target_data_set_pointer)
: ObjectiveFunctional(new_multilayer_perceptron_pointer)
{
   input_target_data_set_pointer = new_input_target_data_set_pointer;

   Minkowski_parameter = 1.5;
}


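/// Destructor.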
MinkowskiError::~MinkowskiError(void)
{
}


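/// This method returns the Minkowski parameter (exponent) used to calculate the error.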
double MinkowskiError::get_Minkowski_parameter(void)
{
   return(Minkowski_parameter);
}


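/// This method sets the multilayer perceptron and input-target data set pointers to NULL,
/// restores the default members and sets the Minkowski parameter to 1.5.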
void MinkowskiError::set(void)
{
   multilayer_perceptron_pointer = NULL;
   input_target_data_set_pointer = NULL;
   set_default();
   Minkowski_parameter = 1.5;
}


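/// This method sets a new multilayer perceptron pointer, sets the input-target data set pointer
/// to NULL, restores the default members and sets the Minkowski parameter to 1.5.
/// @param new_multilayer_perceptron_pointer Pointer to a multilayer perceptron object.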
void MinkowskiError::set(MultilayerPerceptron* new_multilayer_perceptron_pointer)
{
   multilayer_perceptron_pointer = new_multilayer_perceptron_pointer;
   input_target_data_set_pointer = NULL;
   set_default();
   Minkowski_parameter = 1.5;
}


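/// This method sets a new input-target data set pointer, sets the multilayer perceptron pointer
/// to NULL, restores the default members and sets the Minkowski parameter to 1.5.
/// @param new_input_target_data_set_pointer Pointer to an input-target data set object.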
void MinkowskiError::set(InputTargetDataSet* new_input_target_data_set_pointer)
{
   multilayer_perceptron_pointer = NULL;
   input_target_data_set_pointer = new_input_target_data_set_pointer;
   set_default();
   Minkowski_parameter = 1.5;
}


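/// This method sets new multilayer perceptron and input-target data set pointers, restores the
/// default members and sets the Minkowski parameter to 1.5.
/// @param new_multilayer_perceptron_pointer Pointer to a multilayer perceptron object.
/// @param new_input_target_data_set_pointer Pointer to an input-target data set object.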
void MinkowskiError::set(MultilayerPerceptron* new_multilayer_perceptron_pointer, InputTargetDataSet* new_input_target_data_set_pointer)
{
   multilayer_perceptron_pointer = new_multilayer_perceptron_pointer;
   input_target_data_set_pointer = new_input_target_data_set_pointer;
   set_default();
   Minkowski_parameter = 1.5;
}


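/// This method sets a new input-target data set to be used for calculating the error.
/// @param new_input_target_data_set_pointer Pointer to an input-target data set object.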
void MinkowskiError::set_input_target_data_set_pointer(InputTargetDataSet* new_input_target_data_set_pointer)
{
   input_target_data_set_pointer = new_input_target_data_set_pointer;
}


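/// This method sets a new Minkowski parameter, which must lie in the interval [1, 2].
/// @param new_Minkowski_parameter Minkowski exponent value.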
void MinkowskiError::set_Minkowski_parameter(double new_Minkowski_parameter)
{
   // Control sentence

   if(new_Minkowski_parameter < 1.0 || new_Minkowski_parameter > 2.0)
   {
      std::cerr << "Flood Error. MinkowskiError class." << std::endl
                << "void set_Minkowski_parameter(double) method." << std::endl
                << "The Minkowski parameter must be between 1 and 2." << std::endl
                << std::endl;

      exit(1);
   }

   // Set Minkowski parameter

   Minkowski_parameter = new_Minkowski_parameter;
}


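/// This method returns the Minkowski error of the multilayer perceptron on the training instances
/// of the input-target data set, i.e. the sum over all training instances and output variables of
/// |output - target| raised to the Minkowski parameter.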
double MinkowskiError::calculate_objective(void)
{
   // Control sentence (if debug)

   #ifdef _DEBUG

   if(multilayer_perceptron_pointer == NULL)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "double calculate_objective(void) method." << std::endl
                << "Pointer to multilayer perceptron object cannot be NULL." << std::endl;

      exit(1);
   }
   else if(input_target_data_set_pointer == NULL)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "double calculate_objective(void) method." << std::endl
                << "Pointer to input-target data set object cannot be NULL." << std::endl;

      exit(1);
   }

   #endif

   int training_instances_number = input_target_data_set_pointer->get_training_instances_number();

   int inputs_number = multilayer_perceptron_pointer->get_inputs_number();
   int outputs_number = multilayer_perceptron_pointer->get_outputs_number();

   #ifdef _DEBUG

   int input_variables_number = input_target_data_set_pointer->get_input_variables_number();

   if(inputs_number != input_variables_number)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "double calculate_objective(void) method." << std::endl
                << "Number of inputs in multilayer perceptron must be equal to "
                << "number of input variables in input-target data set." << std::endl;

      exit(1);
   }

   int target_variables_number = input_target_data_set_pointer->get_target_variables_number();

   if(outputs_number != target_variables_number)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "double calculate_objective(void) method." << std::endl
                << "Number of outputs in multilayer perceptron must be equal to "
                << "number of target variables in input-target data set." << std::endl;

      exit(1);
   }

   #endif

   Vector<double> input(inputs_number);
   Vector<double> output(outputs_number);
   Vector<double> target(outputs_number);

   double instance_error;

   double training_error = 0.0;

   for(int i = 0; i < training_instances_number; i++)
   {
      // Input vector

      input = input_target_data_set_pointer->get_training_input_instance(i);

      // Output vector

      output = multilayer_perceptron_pointer->calculate_output(input);

      // Target vector

      target = input_target_data_set_pointer->get_training_target_instance(i);

      // Minkowski error

      for(int j = 0; j < outputs_number; j++)
      {
         instance_error = fabs(output[j] - target[j]);

         training_error += pow(instance_error, Minkowski_parameter);
      }
   }

   return(training_error);
}


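/// This method returns the Minkowski error of the multilayer perceptron on the validation
/// instances of the input-target data set.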
double MinkowskiError::calculate_validation_error(void)
{
   // Control sentence (if debug)

   #ifdef _DEBUG

   if(multilayer_perceptron_pointer == NULL)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "double calculate_validation_error(void) method." << std::endl
                << "Pointer to multilayer perceptron object cannot be NULL." << std::endl;

      exit(1);
   }
   else if(input_target_data_set_pointer == NULL)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "double calculate_validation_error(void) method." << std::endl
                << "Pointer to input-target data set object cannot be NULL." << std::endl;

      exit(1);
   }

   #endif

   int inputs_number = multilayer_perceptron_pointer->get_inputs_number();
   int outputs_number = multilayer_perceptron_pointer->get_outputs_number();

   #ifdef _DEBUG

   int input_variables_number = input_target_data_set_pointer->get_input_variables_number();
   int target_variables_number = input_target_data_set_pointer->get_target_variables_number();

   if(inputs_number != input_variables_number || outputs_number != target_variables_number)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "double calculate_validation_error(void) method." << std::endl
                << "Number of inputs and outputs in multilayer perceptron must be equal to "
                << "number of input and output variables in input-target data set." << std::endl;

      exit(1);
   }

   #endif

   int validation_instances_number = input_target_data_set_pointer->get_validation_instances_number();

   Vector<double> input(inputs_number);
   Vector<double> output(outputs_number);
   Vector<double> target(outputs_number);

   double instance_error;

   double validation_error = 0.0;

   for(int i = 0; i < validation_instances_number; i++)
   {
      // Input vector

      input = input_target_data_set_pointer->get_validation_input_instance(i);

      // Output vector

      output = multilayer_perceptron_pointer->calculate_output(input);

      // Target vector

      target = input_target_data_set_pointer->get_validation_target_instance(i);

      // Minkowski error

      for(int j = 0; j < outputs_number; j++)
      {
         instance_error = fabs(output[j] - target[j]);

         validation_error += pow(instance_error, Minkowski_parameter);
      }
   }

   return(validation_error);
}


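/// This method returns the error signal of the output layer for a single training instance, given
/// the forward propagation derivative of the multilayer perceptron and the corresponding target
/// vector. The error signal accounts for the outputs unscaling method in use.
/// @param forward_propagation_derivative Layer outputs and output derivatives of the network.
/// @param target Target vector for the instance.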
Vector<double> MinkowskiError::calculate_output_errors
(const Vector< Vector<double> >& forward_propagation_derivative, const Vector<double>& target)
{
   int forward_propagation_derivative_size = forward_propagation_derivative.get_size();
   int outputs_number = multilayer_perceptron_pointer->get_outputs_number();

   // Control sentence (if debug)

   #ifdef _DEBUG

   int hidden_layers_number = multilayer_perceptron_pointer->get_hidden_layers_number();

   if(forward_propagation_derivative_size != 2*hidden_layers_number+2)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "Vector<double> calculate_output_errors(const Vector< Vector<double> >&, const Vector<double>&) method." << std::endl
                << "Size of forward propagation derivative vector must be equal to 2*hidden_layers_number+2."
                << std::endl;

      exit(1);
   }

   int output_layer_output_size = forward_propagation_derivative[forward_propagation_derivative_size-2].get_size();

   if(output_layer_output_size != outputs_number)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "Vector<double> calculate_output_errors(const Vector< Vector<double> >&, const Vector<double>&) method." << std::endl
                << "Size of output layer output (" << output_layer_output_size << ") must be equal to "
                << "number of outputs (" << outputs_number << ")." << std::endl;

      exit(1);
   }

   int output_layer_output_derivative_size
   = forward_propagation_derivative[forward_propagation_derivative_size-1].get_size();

   if(output_layer_output_derivative_size != outputs_number)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "Vector<double> calculate_output_errors(const Vector< Vector<double> >&, const Vector<double>&) method." << std::endl
                << "Size of output layer output derivative (" << output_layer_output_derivative_size << ") must be equal to "
                << "number of outputs (" << outputs_number << ")." << std::endl;

      exit(1);
   }

   int target_size = target.get_size();

   if(target_size != outputs_number)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "Vector<double> calculate_output_errors(const Vector< Vector<double> >&, const Vector<double>&) method." << std::endl
                << "Size of target must be equal to number of outputs." << std::endl;

      exit(1);
   }

   #endif

   Vector<double> output_layer_output_derivative = forward_propagation_derivative[forward_propagation_derivative_size-1];
   Vector<double> output_layer_output = forward_propagation_derivative[forward_propagation_derivative_size-2];

   Vector<double> output_errors(outputs_number);

   Vector<double> error(outputs_number);

   MultilayerPerceptron::ScalingMethod outputs_unscaling_method
   = multilayer_perceptron_pointer->get_outputs_unscaling_method();

   switch(outputs_unscaling_method)
   {
      case MultilayerPerceptron::None:
      {
         error = output_layer_output-target;

         output_errors = output_layer_output_derivative*error*2.0;
      }
      break;

      case MultilayerPerceptron::MeanStandardDeviation:
      {
         Vector<double> output_variables_standard_deviation = multilayer_perceptron_pointer->get_output_variables_standard_deviation();

         // Control sentence (if debug)

         #ifdef _DEBUG

         int output_variables_standard_deviation_size = output_variables_standard_deviation.get_size();

         if(output_variables_standard_deviation_size != outputs_number)
         {
            std::cerr << "Flood Error: MinkowskiError class." << std::endl
                      << "Vector<double> calculate_output_errors(const Vector< Vector<double> >&, const Vector<double>&) method." << std::endl
                      << "Size of output variables standard deviation must be equal to number of outputs." << std::endl;

            exit(1);
         }

         #endif

         // Error between output and target (the assignment was missing in this branch)

         error = output_layer_output-target;

         output_errors = output_layer_output_derivative*error*output_variables_standard_deviation*2.0;
      }
      break;

      case MultilayerPerceptron::MinimumMaximum:
      {
         Vector<double> output_variables_minimum = multilayer_perceptron_pointer->get_output_variables_minimum();
         Vector<double> output_variables_maximum = multilayer_perceptron_pointer->get_output_variables_maximum();

         // Control sentence (if debug)

         #ifdef _DEBUG

         int output_variables_minimum_size = output_variables_minimum.get_size();
         int output_variables_maximum_size = output_variables_maximum.get_size();

         if(output_variables_minimum_size != outputs_number)
         {
            std::cerr << "Flood Error: MinkowskiError class." << std::endl
                      << "Vector<double> calculate_output_errors(const Vector< Vector<double> >&, const Vector<double>&) method." << std::endl
                      << "Size of output variables minimum must be equal to number of outputs." << std::endl;

            exit(1);
         }
         else if(output_variables_maximum_size != outputs_number)
         {
            std::cerr << "Flood Error: MinkowskiError class." << std::endl
                      << "Vector<double> calculate_output_errors(const Vector< Vector<double> >&, const Vector<double>&) method." << std::endl
                      << "Size of output variables maximum must be equal to number of outputs." << std::endl;

            exit(1);
         }

         #endif

         // Error between output and target (the assignment was missing in this branch)

         error = output_layer_output-target;

         Vector<double> output_variables_range = output_variables_maximum-output_variables_minimum;

         output_errors = output_layer_output_derivative*error*output_variables_range;
      }
      break;

      default:
      {
         std::cerr << "Flood Error: MinkowskiError class." << std::endl
                   << "Vector<double> calculate_output_errors(const Vector< Vector<double> >&, const Vector<double>&) method." << std::endl
                   << "Unknown outputs unscaling method." << std::endl;

         exit(1);
      }
      break;
   }

   return(output_errors);
}


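/// This method returns the error signals of the hidden layers for a single training instance,
/// backpropagated from the output errors through the output and hidden layer synaptic weights.
/// @param forward_propagation_derivative Layer outputs and output derivatives of the network.
/// @param output_errors Error signal of the output layer for the instance.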
Vector< Vector<double> > MinkowskiError::calculate_hidden_errors
(const Vector< Vector<double> >& forward_propagation_derivative, const Vector<double>& output_errors)
{
   int hidden_layers_number = multilayer_perceptron_pointer->get_hidden_layers_number();
   int outputs_number = multilayer_perceptron_pointer->get_outputs_number();

   // Control sentence (if debug)

   #ifdef _DEBUG

   int forward_propagation_derivative_size = forward_propagation_derivative.get_size();

   if(forward_propagation_derivative_size != 2*hidden_layers_number+2)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "Vector< Vector<double> > calculate_hidden_errors(const Vector< Vector<double> >&, const Vector<double>&) method." << std::endl
                << "Size of forward propagation derivative vector must be equal to 2*hidden_layers_number+2."
                << std::endl;

      exit(1);
   }

   int output_layer_output_size = forward_propagation_derivative[forward_propagation_derivative_size-2].get_size();

   if(output_layer_output_size != outputs_number)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "Vector< Vector<double> > calculate_hidden_errors(const Vector< Vector<double> >&, const Vector<double>&) method." << std::endl
                << "Size of output layer output (" << output_layer_output_size << ") must be equal to "
                << "number of outputs (" << outputs_number << ")." << std::endl;

      exit(1);
   }

   int output_layer_output_derivative_size
   = forward_propagation_derivative[forward_propagation_derivative_size-1].get_size();

   if(output_layer_output_derivative_size != outputs_number)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "Vector< Vector<double> > calculate_hidden_errors(const Vector< Vector<double> >&, const Vector<double>&) method." << std::endl
                << "Size of output layer output derivative (" << output_layer_output_derivative_size << ") must be equal to "
                << "number of outputs (" << outputs_number << ")." << std::endl;

      exit(1);
   }

   int output_errors_size = output_errors.get_size();

   if(output_errors_size != outputs_number)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "Vector< Vector<double> > calculate_hidden_errors(const Vector< Vector<double> >&, const Vector<double>&) method." << std::endl
                << "Size of output errors must be equal to number of outputs." << std::endl;

      exit(1);
   }

   #endif

   // Set up hidden errors vector of vectors

   Vector<int> hidden_layers_size = multilayer_perceptron_pointer->get_hidden_layers_size();

   Vector< Vector<double> > hidden_errors(hidden_layers_number);

   for(int h = 0; h < hidden_layers_number; h++)
   {
      hidden_errors[h].set_size(hidden_layers_size[h]);
   }

   // Get network layers and hidden layers output derivatives

   Vector<Perceptron>& output_layer = multilayer_perceptron_pointer->get_output_layer();
   Vector< Vector<Perceptron> >& hidden_layers = multilayer_perceptron_pointer->get_hidden_layers();

   Vector< Vector<double> > hidden_layers_output_derivative(hidden_layers_number);

   for(int i = 0; i < hidden_layers_number; i++)
   {
      hidden_layers_output_derivative[i] = forward_propagation_derivative[1+2*i];
   }

   Vector<double> synaptic_weights;

   double sum;

   // Last hidden layer errors, backpropagated from the output layer

   for(int j = 0; j < hidden_layers_size[hidden_layers_number-1]; j++)
   {
      sum = 0.0;

      for(int k = 0; k < outputs_number; k++)
      {
         synaptic_weights = output_layer[k].get_synaptic_weights();

         sum += (synaptic_weights[j])*output_errors[k];
      }

      hidden_errors[hidden_layers_number-1][j] = hidden_layers_output_derivative[hidden_layers_number-1][j]*sum;
   }

   // Remaining hidden layers errors, backpropagated from the following hidden layer

   for(int h = hidden_layers_number-2; h >= 0; h--)
   {
      for(int j = 0; j < hidden_layers_size[h]; j++)
      {
         sum = 0.0;

         for(int k = 0; k < hidden_layers_size[h+1]; k++)
         {
            synaptic_weights = hidden_layers[h+1][k].get_synaptic_weights();

            sum += (synaptic_weights[j])*hidden_errors[h+1][k];
         }

         hidden_errors[h][j] = hidden_layers_output_derivative[h][j]*sum;
      }
   }

   return(hidden_errors);
}


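/// This method returns the gradient of the error with respect to the hidden layers parameters
/// (biases and synaptic weights) for a single training instance.
/// @param input Input vector of the instance.
/// @param forward_propagation_derivative Layer outputs and output derivatives of the network.
/// @param hidden_errors Error signals of the hidden layers for the instance.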
Vector<double> MinkowskiError::calculate_hidden_layers_error_gradient
(const Vector<double>& input,
 const Vector< Vector<double> >& forward_propagation_derivative,
 const Vector< Vector<double> >& hidden_errors)
{
   int inputs_number = multilayer_perceptron_pointer->get_inputs_number();
   int hidden_layers_number = multilayer_perceptron_pointer->get_hidden_layers_number();

   // Control sentence (if debug)

   #ifdef _DEBUG

   int input_size = input.get_size();

   if(input_size != inputs_number)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "Vector<double> calculate_hidden_layers_error_gradient(const Vector<double>&, const Vector< Vector<double> >&, const Vector< Vector<double> >&) method." << std::endl
                << "Size of input (" << input_size << ") must be equal to inputs number (" << inputs_number << ")."
                << std::endl;

      exit(1);
   }

   int forward_propagation_derivative_size = forward_propagation_derivative.get_size();

   if(forward_propagation_derivative_size != 2*hidden_layers_number+2)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "Vector<double> calculate_hidden_layers_error_gradient(const Vector<double>&, const Vector< Vector<double> >&, const Vector< Vector<double> >&) method." << std::endl
                << "Size of forward propagation derivative (" << forward_propagation_derivative_size << ") must be equal to 2*hidden_layers_number+2 (" << 2*hidden_layers_number+2 << ")."
                << std::endl;

      exit(1);
   }

   int hidden_errors_size = hidden_errors.get_size();

   if(hidden_errors_size != hidden_layers_number)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "Vector<double> calculate_hidden_layers_error_gradient(const Vector<double>&, const Vector< Vector<double> >&, const Vector< Vector<double> >&) method." << std::endl
                << "Size of hidden errors (" << hidden_errors_size << ") must be equal to number of hidden layers (" << hidden_layers_number << ")." << std::endl;

      exit(1);
   }

   #endif

   // Get hidden layers and hidden layers output

   Vector<int> hidden_layers_size = multilayer_perceptron_pointer->get_hidden_layers_size();

   int hidden_layers_parameters_number = multilayer_perceptron_pointer->get_hidden_layers_parameters_number();

   Vector< Vector<Perceptron> >& hidden_layers = multilayer_perceptron_pointer->get_hidden_layers();

   Vector<double> synaptic_weights;

   Vector< Vector<double> > hidden_layers_output(hidden_layers_number);

   for(int i = 0; i < hidden_layers_number; i++)
   {
      hidden_layers_output[i] = forward_propagation_derivative[2*i];
   }

   int index = 0;

   Vector<double> hidden_layers_error_gradient(hidden_layers_parameters_number, 0.0);

   // First hidden layer

   for(int j = 0; j < hidden_layers_size[0]; j++)
   {
      // Bias

      hidden_layers_error_gradient[index] += hidden_errors[0][j];
      index++;

      // Synaptic weights

      synaptic_weights = hidden_layers[0][j].get_synaptic_weights();

      for(int k = 0; k < inputs_number; k++)
      {
         hidden_layers_error_gradient[index] += hidden_errors[0][j]*input[k];
         index++;
      }
   }

   // Remaining hidden layers

   for(int h = 1; h < hidden_layers_number; h++)
   {
      for(int j = 0; j < hidden_layers_size[h]; j++)
      {
         // Bias

         hidden_layers_error_gradient[index] += hidden_errors[h][j];
         index++;

         // Synaptic weights

         synaptic_weights = hidden_layers[h][j].get_synaptic_weights();

         for(int k = 0; k < hidden_layers_size[h-1]; k++)
         {
            hidden_layers_error_gradient[index] += hidden_errors[h][j]*hidden_layers_output[h-1][k];
            index++;
         }
      }
   }

   return(hidden_layers_error_gradient);
}


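/// This method returns the gradient of the error with respect to the output layer parameters
/// (biases and synaptic weights) for a single training instance.
/// @param forward_propagation_derivative Layer outputs and output derivatives of the network.
/// @param output_errors Error signal of the output layer for the instance.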
Vector<double> MinkowskiError::calculate_output_layer_error_gradient
(const Vector< Vector<double> >& forward_propagation_derivative, const Vector<double>& output_errors)
{
   int outputs_number = multilayer_perceptron_pointer->get_outputs_number();
   int hidden_layers_number = multilayer_perceptron_pointer->get_hidden_layers_number();

   // Control sentence (if debug)

   #ifdef _DEBUG

   int forward_propagation_derivative_size = forward_propagation_derivative.get_size();

   if(forward_propagation_derivative_size != 2*hidden_layers_number+2)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "Vector<double> calculate_output_layer_error_gradient(const Vector< Vector<double> >&, const Vector<double>&) method." << std::endl
                << "Size of forward propagation derivative (" << forward_propagation_derivative_size << ") must be equal to 2*hidden_layers_number+2 (" << 2*hidden_layers_number+2 << ")."
                << std::endl;

      exit(1);
   }

   int output_errors_size = output_errors.get_size();

   if(output_errors_size != outputs_number)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "Vector<double> calculate_output_layer_error_gradient(const Vector< Vector<double> >&, const Vector<double>&) method." << std::endl
                << "Size of output errors (" << output_errors_size << ") must be equal to number of outputs (" << outputs_number << ")." << std::endl;

      exit(1);
   }

   #endif

   // Get hidden layers output

   Vector<int> hidden_layers_size = multilayer_perceptron_pointer->get_hidden_layers_size();

   int output_layer_parameters_number = multilayer_perceptron_pointer->get_output_layer_parameters_number();

   Vector< Vector<double> > hidden_layers_output(hidden_layers_number);

   for(int i = 0; i < hidden_layers_number; i++)
   {
      hidden_layers_output[i] = forward_propagation_derivative[2*i];
   }

   // Output layer error gradient

   Vector<double> output_layer_error_gradient(output_layer_parameters_number, 0.0);

   int index = 0;

   for(int j = 0; j < outputs_number; j++)
   {
      // Bias

      output_layer_error_gradient[index] += output_errors[j];
      index++;

      // Synaptic weights

      for(int k = 0; k < hidden_layers_size[hidden_layers_number-1]; k++)
      {
         output_layer_error_gradient[index] += hidden_layers_output[hidden_layers_number-1][k]*output_errors[j];
         index++;
      }
   }

   return(output_layer_error_gradient);
}


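/// This method returns the gradient of the Minkowski error objective with respect to the free
/// parameters of the multilayer perceptron, obtained by backpropagating the output errors over
/// all training instances and assembling the hidden and output layer gradients.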
Vector<double> MinkowskiError::calculate_objective_gradient(void)
{
   // Control sentence (if debug)

   #ifdef _DEBUG

   if(multilayer_perceptron_pointer == NULL)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "Vector<double> calculate_objective_gradient(void) method." << std::endl
                << "Pointer to multilayer perceptron object cannot be NULL." << std::endl;

      exit(1);
   }
   else if(input_target_data_set_pointer == NULL)
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "Vector<double> calculate_objective_gradient(void) method." << std::endl
                << "Pointer to input-target data set object cannot be NULL." << std::endl;

      exit(1);
   }

   #endif

   // Multilayer perceptron

   int hidden_layers_number = multilayer_perceptron_pointer->get_hidden_layers_number();

   Vector<int> hidden_layers_size = multilayer_perceptron_pointer->get_hidden_layers_size();

   int outputs_number = multilayer_perceptron_pointer->get_outputs_number();

   int forward_propagation_derivative_size = 2*hidden_layers_number + 2;

   Vector< Vector<double> > forward_propagation_derivative(forward_propagation_derivative_size);

   // Input-target data set

   int training_instances_number = input_target_data_set_pointer->get_training_instances_number();

   int input_variables_number = input_target_data_set_pointer->get_input_variables_number();
   int target_variables_number = input_target_data_set_pointer->get_target_variables_number();

   Vector<double> training_input_instance(input_variables_number);
   Vector<double> training_target_instance(target_variables_number);

   // Error signals

   Vector<double> output_errors(outputs_number);
   Vector< Vector<double> > hidden_errors(hidden_layers_number);

   for(int h = 0; h < hidden_layers_number; h++)
   {
      hidden_errors[h].set_size(hidden_layers_size[h]);
   }

   // Error gradients

   int hidden_layers_parameters_number = multilayer_perceptron_pointer->get_hidden_layers_parameters_number();
   int output_layer_parameters_number = multilayer_perceptron_pointer->get_output_layer_parameters_number();

   Vector<double> hidden_layers_error_gradient(hidden_layers_parameters_number, 0.0);
   Vector<double> output_layer_error_gradient(output_layer_parameters_number, 0.0);

   // Main loop over the training instances

   for(int i = 0; i < training_instances_number; i++)
   {
      training_input_instance = input_target_data_set_pointer->get_training_input_instance(i);

      forward_propagation_derivative = multilayer_perceptron_pointer->calculate_forward_propagation_derivative(training_input_instance);

      training_target_instance = input_target_data_set_pointer->get_training_target_instance(i);

      output_errors = calculate_output_errors(forward_propagation_derivative, training_target_instance);

      hidden_errors = calculate_hidden_errors(forward_propagation_derivative, output_errors);

      hidden_layers_error_gradient += calculate_hidden_layers_error_gradient(training_input_instance, forward_propagation_derivative, hidden_errors);

      output_layer_error_gradient += calculate_output_layer_error_gradient(forward_propagation_derivative, output_errors);
   }

   return(hidden_layers_error_gradient.assemble(output_layer_error_gradient));
}


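/// This method returns a string with the members of this object in an XML-type format.
/// @param show_declaration True if the Flood declaration tag is to be included, false otherwise.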
std::string MinkowskiError::to_XML(bool show_declaration)
{
   std::stringstream buffer;

   // Declaration

   if(show_declaration)
   {
      buffer << "<Flood version='3.0' class='MinkowskiError'>" << std::endl;
   }

   buffer << "<MinkowskiParameter>" << std::endl
          << Minkowski_parameter << std::endl
          << "</MinkowskiParameter>" << std::endl;

   buffer << "<RegularizationMethod>" << std::endl
          << get_regularization_method_name() << std::endl
          << "</RegularizationMethod>" << std::endl;

   buffer << "<ObjectiveWeight>" << std::endl
          << objective_weight << std::endl
          << "</ObjectiveWeight>" << std::endl;

   buffer << "<RegularizationWeight>" << std::endl
          << regularization_weight << std::endl
          << "</RegularizationWeight>" << std::endl;

   buffer << "<CalculateEvaluationCount>" << std::endl
          << calculate_evaluation_count << std::endl
          << "</CalculateEvaluationCount>" << std::endl;

   buffer << "<CalculateGradientCount>" << std::endl
          << calculate_gradient_count << std::endl
          << "</CalculateGradientCount>" << std::endl;

   buffer << "<CalculateHessianCount>" << std::endl
          << calculate_Hessian_count << std::endl
          << "</CalculateHessianCount>" << std::endl;

   buffer << "<NumericalDifferentiationMethod>" << std::endl
          << get_numerical_differentiation_method_name() << std::endl
          << "</NumericalDifferentiationMethod>" << std::endl;

   buffer << "<NumericalEpsilonMethod>" << std::endl
          << get_numerical_epsilon_method_name() << std::endl
          << "</NumericalEpsilonMethod>" << std::endl;

   buffer << "<NumericalEpsilon>" << std::endl
          << numerical_epsilon << std::endl
          << "</NumericalEpsilon>" << std::endl;

   buffer << "<Display>" << std::endl
          << display << std::endl
          << "</Display>" << std::endl;

   return(buffer.str());
}


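/// This method loads the members of a Minkowski error object from an XML-type file.
/// @param filename Name of the Minkowski error XML-type file.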
void MinkowskiError::load(const char* filename)
{
   std::fstream file;

   file.open(filename, std::ios::in);

   if(!file.is_open())
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "void load(const char*) method." << std::endl
                << "Cannot open XML-type Minkowski error file." << std::endl;

      exit(1);
   }

   std::string line;
   std::string word;

   // Declaration

   getline(file, line);

   if(line != "<Flood version='3.0' class='MinkowskiError'>")
   {
      std::cerr << "Flood Error: MinkowskiError class." << std::endl
                << "void load(const char*) method." << std::endl
                << "Unknown file declaration: " << line << std::endl;

      exit(1);
   }

   // Members

   while(!file.eof())
   {
      file >> word;

      if(word == "<MinkowskiParameter>")
      {
         // Read the Minkowski parameter written by to_XML

         double new_Minkowski_parameter;

         file >> new_Minkowski_parameter;

         set_Minkowski_parameter(new_Minkowski_parameter);

         file >> word;

         if(word != "</MinkowskiParameter>")
         {
            std::cerr << "Flood Error: MinkowskiError class." << std::endl
                      << "void load(const char*) method." << std::endl
                      << "Unknown Minkowski parameter end tag: " << word << std::endl;

            exit(1);
         }
      }
      else if(word == "<RegularizationMethod>")
      {
         std::string new_regularization_method_name;

         file >> new_regularization_method_name;

         set_regularization_method(new_regularization_method_name);

         file >> word;

         if(word != "</RegularizationMethod>")
         {
            std::cerr << "Flood Error: MinkowskiError class." << std::endl
                      << "void load(const char*) method." << std::endl
                      << "Unknown regularization method end tag: " << word << std::endl;

            exit(1);
         }
      }
      else if(word == "<ObjectiveWeight>")
      {
         double new_objective_weight;

         file >> new_objective_weight;

         set_objective_weight(new_objective_weight);

         file >> word;

         if(word != "</ObjectiveWeight>")
         {
            std::cerr << "Flood Error: MinkowskiError class." << std::endl
                      << "void load(const char*) method." << std::endl
                      << "Unknown objective weight end tag: " << word << std::endl;

            exit(1);
         }
      }
      else if(word == "<RegularizationWeight>")
      {
         double new_regularization_weight;

         file >> new_regularization_weight;

         set_regularization_weight(new_regularization_weight);

         file >> word;

         if(word != "</RegularizationWeight>")
         {
            std::cerr << "Flood Error: MinkowskiError class." << std::endl
                      << "void load(const char*) method." << std::endl
                      << "Unknown regularization weight end tag: " << word << std::endl;

            exit(1);
         }
      }
      else if(word == "<CalculateEvaluationCount>")
      {
         int new_calculate_evaluation_count;

         file >> new_calculate_evaluation_count;

         set_calculate_evaluation_count(new_calculate_evaluation_count);

         file >> word;

         if(word != "</CalculateEvaluationCount>")
         {
            std::cerr << "Flood Error: MinkowskiError class." << std::endl
                      << "void load(const char*) method." << std::endl
                      << "Unknown calculate evaluation count end tag: " << word << std::endl;

            exit(1);
         }
      }
      else if(word == "<CalculateGradientCount>")
      {
         int new_calculate_gradient_count;

         file >> new_calculate_gradient_count;

         set_calculate_gradient_count(new_calculate_gradient_count);

         file >> word;

         if(word != "</CalculateGradientCount>")
         {
            std::cerr << "Flood Error: MinkowskiError class." << std::endl
                      << "void load(const char*) method." << std::endl
                      << "Unknown calculate gradient count end tag: " << word << std::endl;

            exit(1);
         }
      }
      else if(word == "<CalculateHessianCount>")
      {
         int new_calculate_Hessian_count;

         file >> new_calculate_Hessian_count;

         set_calculate_Hessian_count(new_calculate_Hessian_count);

         file >> word;

         if(word != "</CalculateHessianCount>")
         {
            std::cerr << "Flood Error: MinkowskiError class." << std::endl
                      << "void load(const char*) method." << std::endl
                      << "Unknown calculate Hessian count end tag: " << word << std::endl;

            exit(1);
         }
      }
      else if(word == "<NumericalDifferentiationMethod>")
      {
         std::string new_numerical_differentiation_method_name;

         file >> new_numerical_differentiation_method_name;

         set_numerical_differentiation_method(new_numerical_differentiation_method_name);

         file >> word;

         if(word != "</NumericalDifferentiationMethod>")
         {
            std::cerr << "Flood Error: MinkowskiError class." << std::endl
                      << "void load(const char*) method." << std::endl
                      << "Unknown numerical differentiation method end tag: " << word << std::endl;

            exit(1);
         }
      }
      else if(word == "<NumericalEpsilonMethod>")
      {
         std::string new_numerical_epsilon_method_name;

         file >> new_numerical_epsilon_method_name;

         set_numerical_epsilon_method(new_numerical_epsilon_method_name);

         file >> word;

         if(word != "</NumericalEpsilonMethod>")
         {
            std::cerr << "Flood Error: MinkowskiError class." << std::endl
                      << "void load(const char*) method." << std::endl
                      << "Unknown numerical epsilon method end tag: " << word << std::endl;

            exit(1);
         }
      }
      else if(word == "<NumericalEpsilon>")
      {
         double new_numerical_epsilon;

         file >> new_numerical_epsilon;

         set_numerical_epsilon(new_numerical_epsilon);

         file >> word;

         if(word != "</NumericalEpsilon>")
         {
            std::cerr << "Flood Error: MinkowskiError class." << std::endl
                      << "void load(const char*) method." << std::endl
                      << "Unknown numerical epsilon end tag: " << word << std::endl;

            exit(1);
         }
      }
      else if(word == "<Display>")
      {
         bool new_display;

         file >> new_display;

         set_display(new_display);

         file >> word;

         if(word != "</Display>")
         {
            std::cerr << "Flood Error: MinkowskiError class." << std::endl
                      << "void load(const char*) method." << std::endl
                      << "Unknown display end tag: " << word << std::endl;

            exit(1);
         }
      }
      else
      {
         // Unknown tags are ignored
      }
   }

   file.close();
}

}