// Flood: An Open Source Neural Networks C++ Library.
// TrainingAlgorithm: base class for the training algorithms and their one-dimensional line searches.

#include <iostream>
#include <fstream>
#include <sstream>
#include <algorithm>
#include <functional>
#include <limits>
#include <cmath>
#include <ctime>

#include "TrainingAlgorithm.h"

namespace Flood
{

/// General constructor. It creates a training algorithm object associated to the given objective functional.
/// @param new_objective_functional_pointer Pointer to an objective functional object.

TrainingAlgorithm::TrainingAlgorithm(ObjectiveFunctional* new_objective_functional_pointer)
{
   objective_functional_pointer = new_objective_functional_pointer;

   set_default();
}


/// Default constructor. It creates a training algorithm object not associated to any objective functional.

TrainingAlgorithm::TrainingAlgorithm(void)
{
   objective_functional_pointer = NULL;

   set_default();
}


/// Destructor.

TrainingAlgorithm::~TrainingAlgorithm(void)
{
}

ObjectiveFunctional* TrainingAlgorithm::get_objective_functional_pointer(void)
{
   return(objective_functional_pointer);
}


TrainingAlgorithm::TrainingRateMethod TrainingAlgorithm::get_training_rate_method(void)
{
   return(training_rate_method);
}


std::string TrainingAlgorithm::get_training_rate_method_name(void)
{
   switch(training_rate_method)
   {
      case Fixed:
      {
         return("Fixed");
      }
      break;

      case GoldenSection:
      {
         return("GoldenSection");
      }
      break;

      case BrentMethod:
      {
         return("BrentMethod");
      }
      break;

      default:
      {
         std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                   << "std::string get_training_rate_method_name(void) method." << std::endl
                   << "Unknown training rate method." << std::endl;

         exit(1);
      }
      break;
   }
}

double TrainingAlgorithm::get_first_training_rate(void)
{
   return(first_training_rate);
}


double TrainingAlgorithm::get_bracketing_factor(void)
{
   return(bracketing_factor);
}


double TrainingAlgorithm::get_training_rate_tolerance(void)
{
   return(training_rate_tolerance);
}


double TrainingAlgorithm::get_warning_parameters_norm(void)
{
   return(warning_parameters_norm);
}


double TrainingAlgorithm::get_warning_gradient_norm(void)
{
   return(warning_gradient_norm);
}


double TrainingAlgorithm::get_warning_training_rate(void)
{
   return(warning_training_rate);
}


double TrainingAlgorithm::get_error_parameters_norm(void)
{
   return(error_parameters_norm);
}


double TrainingAlgorithm::get_error_gradient_norm(void)
{
   return(error_gradient_norm);
}


double TrainingAlgorithm::get_error_training_rate(void)
{
   return(error_training_rate);
}


double TrainingAlgorithm::get_minimum_parameters_increment_norm(void)
{
   return(minimum_parameters_increment_norm);
}


double TrainingAlgorithm::get_minimum_evaluation_improvement(void)
{
   return(minimum_evaluation_improvement);
}


double TrainingAlgorithm::get_evaluation_goal(void)
{
   return(evaluation_goal);
}


double TrainingAlgorithm::get_gradient_norm_goal(void)
{
   return(gradient_norm_goal);
}


int TrainingAlgorithm::get_maximum_epochs_number(void)
{
   return(maximum_epochs_number);
}


double TrainingAlgorithm::get_maximum_time(void)
{
   return(maximum_time);
}


bool TrainingAlgorithm::get_early_stopping(void)
{
   return(early_stopping);
}


bool TrainingAlgorithm::get_reserve_parameters_history(void)
{
   return(reserve_parameters_history);
}


bool TrainingAlgorithm::get_reserve_parameters_norm_history(void)
{
   return(reserve_parameters_norm_history);
}


bool TrainingAlgorithm::get_reserve_evaluation_history(void)
{
   return(reserve_evaluation_history);
}


bool TrainingAlgorithm::get_reserve_gradient_history(void)
{
   return(reserve_gradient_history);
}


bool TrainingAlgorithm::get_reserve_gradient_norm_history(void)
{
   return(reserve_gradient_norm_history);
}


bool TrainingAlgorithm::get_reserve_inverse_Hessian_history(void)
{
   return(reserve_inverse_Hessian_history);
}


bool TrainingAlgorithm::get_reserve_training_direction_history(void)
{
   return(reserve_training_direction_history);
}


bool TrainingAlgorithm::get_reserve_training_rate_history(void)
{
   return(reserve_training_rate_history);
}


bool TrainingAlgorithm::get_reserve_elapsed_time_history(void)
{
   return(reserve_elapsed_time_history);
}


bool TrainingAlgorithm::get_reserve_validation_error_history(void)
{
   return(reserve_validation_error_history);
}


Vector< Vector<double> >& TrainingAlgorithm::get_parameters_history(void)
{
   return(parameters_history);
}


Vector<double>& TrainingAlgorithm::get_parameters_norm_history(void)
{
   return(parameters_norm_history);
}


Vector<double>& TrainingAlgorithm::get_evaluation_history(void)
{
   return(evaluation_history);
}


Vector< Vector<double> >& TrainingAlgorithm::get_gradient_history(void)
{
   return(gradient_history);
}


Vector<double>& TrainingAlgorithm::get_gradient_norm_history(void)
{
   return(gradient_norm_history);
}


Vector< Matrix<double> >& TrainingAlgorithm::get_inverse_Hessian_history(void)
{
   return(inverse_Hessian_history);
}


Vector< Vector<double> >& TrainingAlgorithm::get_training_direction_history(void)
{
   return(training_direction_history);
}


Vector<double>& TrainingAlgorithm::get_training_rate_history(void)
{
   return(training_rate_history);
}


Vector<double>& TrainingAlgorithm::get_elapsed_time_history(void)
{
   return(elapsed_time_history);
}


Vector<double>& TrainingAlgorithm::get_validation_error_history(void)
{
   return(validation_error_history);
}


int TrainingAlgorithm::get_display_period(void)
{
   return(display_period);
}


bool TrainingAlgorithm::get_display(void)
{
   return(display);
}

/// Sets the objective functional pointer to NULL and the rest of members to their default values.

void TrainingAlgorithm::set(void)
{
   objective_functional_pointer = NULL;
   set_default();
}


/// Sets a new objective functional pointer and the rest of members to their default values.
/// @param new_objective_functional_pointer Pointer to an objective functional object.

void TrainingAlgorithm::set(ObjectiveFunctional* new_objective_functional_pointer)
{
   objective_functional_pointer = new_objective_functional_pointer;
   set_default();
}


/// Sets the members of the training algorithm object to their default values.

void TrainingAlgorithm::set_default(void)
{
   // Training operators

   training_rate_method = BrentMethod;

   // Training parameters

   bracketing_factor = 1.5;
   first_training_rate = 1.0e-2;
   training_rate_tolerance = 1.0e-6;

   warning_parameters_norm = 1.0e6;
   warning_gradient_norm = 1.0e6;
   warning_training_rate = 1.0e6;

   error_parameters_norm = 1.0e9;
   error_gradient_norm = 1.0e9;
   error_training_rate = 1.0e9;

   // Stopping criteria

   minimum_parameters_increment_norm = 0.0;

   minimum_evaluation_improvement = 0.0;
   evaluation_goal = -1.0e99;
   gradient_norm_goal = 0.0;
   early_stopping = false;

   maximum_epochs_number = 1000;
   maximum_time = 1000.0;

   // Training history

   reserve_parameters_history = false;
   reserve_parameters_norm_history = false;

   reserve_evaluation_history = false;
   reserve_gradient_history = false;
   reserve_gradient_norm_history = false;
   reserve_inverse_Hessian_history = false;
   reserve_validation_error_history = false;

   reserve_training_direction_history = false;
   reserve_training_rate_history = false;
   reserve_elapsed_time_history = false;

   // User interface

   display = true;
   display_period = 100;
}
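
// Minimal configuration sketch. The subclass name below is illustrative only
// (Flood's concrete training algorithms supply the actual training loop), and
// 'mof' stands for an existing objective functional object:
//
//    QuasiNewtonMethod training_algorithm(&mof);
//    training_algorithm.set_training_rate_method("BrentMethod");
//    training_algorithm.set_evaluation_goal(1.0e-3);
//    training_algorithm.set_maximum_epochs_number(500);
//    training_algorithm.set_display_period(10);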

void TrainingAlgorithm::set_objective_functional_pointer(ObjectiveFunctional* new_objective_functional_pointer)
{
   objective_functional_pointer = new_objective_functional_pointer;
}


void TrainingAlgorithm::set_training_rate_method(const TrainingAlgorithm::TrainingRateMethod& new_training_rate_method)
{
   training_rate_method = new_training_rate_method;
}


void TrainingAlgorithm::set_training_rate_method(const std::string& new_training_rate_method_name)
{
   if(new_training_rate_method_name == "Fixed")
   {
      training_rate_method = Fixed;
   }
   else if(new_training_rate_method_name == "GoldenSection")
   {
      training_rate_method = GoldenSection;
   }
   else if(new_training_rate_method_name == "BrentMethod")
   {
      training_rate_method = BrentMethod;
   }
   else
   {
      std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                << "void set_training_rate_method(const std::string&) method." << std::endl
                << "Unknown training rate method name: " << new_training_rate_method_name << "." << std::endl;

      exit(1);
   }
}


void TrainingAlgorithm::set_first_training_rate(double new_first_training_rate)
{
   #ifdef _DEBUG

   if(new_first_training_rate < 0.0)
   {
      std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                << "void set_first_training_rate(double) method." << std::endl
                << "First training rate must be equal to or greater than 0." << std::endl;

      exit(1);
   }

   #endif

   first_training_rate = new_first_training_rate;
}


void TrainingAlgorithm::set_bracketing_factor(double new_bracketing_factor)
{
   #ifdef _DEBUG

   if(new_bracketing_factor < 0.0)
   {
      std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                << "void set_bracketing_factor(double) method." << std::endl
                << "Bracketing factor must be equal to or greater than 0." << std::endl;

      exit(1);
   }

   #endif

   bracketing_factor = new_bracketing_factor;
}


void TrainingAlgorithm::set_training_rate_tolerance(double new_training_rate_tolerance)
{
   #ifdef _DEBUG

   if(new_training_rate_tolerance < 0.0)
   {
      std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                << "void set_training_rate_tolerance(double) method." << std::endl
                << "Tolerance must be equal to or greater than 0." << std::endl;

      exit(1);
   }

   #endif

   training_rate_tolerance = new_training_rate_tolerance;
}


void TrainingAlgorithm::set_warning_parameters_norm(double new_warning_parameters_norm)
{
   #ifdef _DEBUG

   if(new_warning_parameters_norm < 0.0)
   {
      std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                << "void set_warning_parameters_norm(double) method." << std::endl
                << "Warning parameters norm must be equal to or greater than 0." << std::endl;

      exit(1);
   }

   #endif

   warning_parameters_norm = new_warning_parameters_norm;
}


void TrainingAlgorithm::set_warning_gradient_norm(double new_warning_gradient_norm)
{
   #ifdef _DEBUG

   if(new_warning_gradient_norm < 0.0)
   {
      std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                << "void set_warning_gradient_norm(double) method." << std::endl
                << "Warning gradient norm must be equal to or greater than 0." << std::endl;

      exit(1);
   }

   #endif

   warning_gradient_norm = new_warning_gradient_norm;
}


void TrainingAlgorithm::set_warning_training_rate(double new_warning_training_rate)
{
   #ifdef _DEBUG

   if(new_warning_training_rate < 0.0)
   {
      std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                << "void set_warning_training_rate(double) method." << std::endl
                << "Warning training rate must be equal to or greater than 0." << std::endl;

      exit(1);
   }

   #endif

   warning_training_rate = new_warning_training_rate;
}


void TrainingAlgorithm::set_error_parameters_norm(double new_error_parameters_norm)
{
   #ifdef _DEBUG

   if(new_error_parameters_norm < 0.0)
   {
      std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                << "void set_error_parameters_norm(double) method." << std::endl
                << "Error parameters norm must be equal to or greater than 0." << std::endl;

      exit(1);
   }

   #endif

   error_parameters_norm = new_error_parameters_norm;
}


void TrainingAlgorithm::set_error_gradient_norm(double new_error_gradient_norm)
{
   #ifdef _DEBUG

   if(new_error_gradient_norm < 0.0)
   {
      std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                << "void set_error_gradient_norm(double) method." << std::endl
                << "Error gradient norm must be equal to or greater than 0." << std::endl;

      exit(1);
   }

   #endif

   error_gradient_norm = new_error_gradient_norm;
}


void TrainingAlgorithm::set_error_training_rate(double new_error_training_rate)
{
   #ifdef _DEBUG

   if(new_error_training_rate < 0.0)
   {
      std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                << "void set_error_training_rate(double) method." << std::endl
                << "Error training rate must be equal to or greater than 0." << std::endl;

      exit(1);
   }

   #endif

   error_training_rate = new_error_training_rate;
}


void TrainingAlgorithm::set_minimum_parameters_increment_norm(double new_minimum_parameters_increment_norm)
{
   #ifdef _DEBUG

   if(new_minimum_parameters_increment_norm < 0.0)
   {
      std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                << "void set_minimum_parameters_increment_norm(double) method." << std::endl
                << "Minimum parameters increment norm must be equal to or greater than 0." << std::endl;

      exit(1);
   }

   #endif

   minimum_parameters_increment_norm = new_minimum_parameters_increment_norm;
}


void TrainingAlgorithm::set_minimum_evaluation_improvement(double new_minimum_evaluation_improvement)
{
   #ifdef _DEBUG

   if(new_minimum_evaluation_improvement < 0.0)
   {
      std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                << "void set_minimum_evaluation_improvement(double) method." << std::endl
                << "Minimum evaluation improvement must be equal to or greater than 0." << std::endl;

      exit(1);
   }

   #endif

   minimum_evaluation_improvement = new_minimum_evaluation_improvement;
}


void TrainingAlgorithm::set_evaluation_goal(double new_evaluation_goal)
{
   evaluation_goal = new_evaluation_goal;
}


void TrainingAlgorithm::set_gradient_norm_goal(double new_gradient_norm_goal)
{
   #ifdef _DEBUG

   if(new_gradient_norm_goal < 0.0)
   {
      std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                << "void set_gradient_norm_goal(double) method." << std::endl
                << "Gradient norm goal must be equal to or greater than 0." << std::endl;

      exit(1);
   }

   #endif

   gradient_norm_goal = new_gradient_norm_goal;
}


void TrainingAlgorithm::set_maximum_epochs_number(int new_maximum_epochs_number)
{
   #ifdef _DEBUG

   if(new_maximum_epochs_number < 0)
   {
      std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                << "void set_maximum_epochs_number(int) method." << std::endl
                << "Number of epochs must be equal to or greater than 0." << std::endl;

      exit(1);
   }

   #endif

   maximum_epochs_number = new_maximum_epochs_number;
}


void TrainingAlgorithm::set_maximum_time(double new_maximum_time)
{
   #ifdef _DEBUG

   if(new_maximum_time < 0.0)
   {
      std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                << "void set_maximum_time(double) method." << std::endl
                << "Maximum time must be equal to or greater than 0." << std::endl;

      exit(1);
   }

   #endif

   maximum_time = new_maximum_time;
}


void TrainingAlgorithm::set_early_stopping(bool new_early_stopping)
{
   early_stopping = new_early_stopping;
}


void TrainingAlgorithm::set_reserve_parameters_history(bool new_reserve_parameters_history)
{
   reserve_parameters_history = new_reserve_parameters_history;
}


void TrainingAlgorithm::set_reserve_parameters_norm_history(bool new_reserve_parameters_norm_history)
{
   reserve_parameters_norm_history = new_reserve_parameters_norm_history;
}


void TrainingAlgorithm::set_reserve_evaluation_history(bool new_reserve_evaluation_history)
{
   reserve_evaluation_history = new_reserve_evaluation_history;
}


void TrainingAlgorithm::set_reserve_gradient_history(bool new_reserve_gradient_history)
{
   reserve_gradient_history = new_reserve_gradient_history;
}


void TrainingAlgorithm::set_reserve_gradient_norm_history(bool new_reserve_gradient_norm_history)
{
   reserve_gradient_norm_history = new_reserve_gradient_norm_history;
}


void TrainingAlgorithm::set_reserve_inverse_Hessian_history(bool new_reserve_inverse_Hessian_history)
{
   reserve_inverse_Hessian_history = new_reserve_inverse_Hessian_history;
}


void TrainingAlgorithm::set_reserve_training_direction_history(bool new_reserve_training_direction_history)
{
   reserve_training_direction_history = new_reserve_training_direction_history;
}


void TrainingAlgorithm::set_reserve_training_rate_history(bool new_reserve_training_rate_history)
{
   reserve_training_rate_history = new_reserve_training_rate_history;
}


void TrainingAlgorithm::set_reserve_elapsed_time_history(bool new_reserve_elapsed_time_history)
{
   reserve_elapsed_time_history = new_reserve_elapsed_time_history;
}


void TrainingAlgorithm::set_reserve_validation_error_history(bool new_reserve_validation_error_history)
{
   reserve_validation_error_history = new_reserve_validation_error_history;
}


void TrainingAlgorithm::set_parameters_history(const Vector< Vector<double> >& new_parameters_history)
{
   parameters_history = new_parameters_history;
}


void TrainingAlgorithm::set_parameters_norm_history(const Vector<double>& new_parameters_norm_history)
{
   parameters_norm_history = new_parameters_norm_history;
}


void TrainingAlgorithm::set_evaluation_history(const Vector<double>& new_evaluation_history)
{
   evaluation_history = new_evaluation_history;
}


void TrainingAlgorithm::set_gradient_history(const Vector< Vector<double> >& new_gradient_history)
{
   gradient_history = new_gradient_history;
}


void TrainingAlgorithm::set_gradient_norm_history(const Vector<double>& new_gradient_norm_history)
{
   gradient_norm_history = new_gradient_norm_history;
}


void TrainingAlgorithm::set_inverse_Hessian_history(const Vector< Matrix<double> >& new_inverse_Hessian_history)
{
   inverse_Hessian_history = new_inverse_Hessian_history;
}


void TrainingAlgorithm::set_training_direction_history(const Vector< Vector<double> >& new_training_direction_history)
{
   training_direction_history = new_training_direction_history;
}


void TrainingAlgorithm::set_training_rate_history(const Vector<double>& new_training_rate_history)
{
   training_rate_history = new_training_rate_history;
}


void TrainingAlgorithm::set_elapsed_time_history(const Vector<double>& new_elapsed_time_history)
{
   elapsed_time_history = new_elapsed_time_history;
}


void TrainingAlgorithm::set_display_period(int new_display_period)
{
   #ifdef _DEBUG

   if(new_display_period <= 0)
   {
      std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                << "void set_display_period(int) method." << std::endl
                << "Display period must be greater than 0." << std::endl;

      exit(1);
   }

   #endif

   display_period = new_display_period;
}


void TrainingAlgorithm::set_display(bool new_display)
{
   display = new_display;
}

/// Returns a two-element vector with the training rate computed by the active training rate method along
/// the given training direction, together with the corresponding evaluation of the objective functional.
/// @param evaluation Current evaluation of the objective functional.
/// @param training_direction Training direction vector.
/// @param initial_training_rate Initial training rate for the line search.

Vector<double> TrainingAlgorithm::calculate_training_rate_evaluation
(double evaluation, const Vector<double>& training_direction, double initial_training_rate)
{
   switch(training_rate_method)
   {
      case TrainingAlgorithm::Fixed:
      {
         return(calculate_fixed_training_rate_evaluation(evaluation, training_direction, initial_training_rate));
      }
      break;

      case TrainingAlgorithm::GoldenSection:
      {
         return(calculate_golden_section_training_rate_evaluation(evaluation, training_direction, initial_training_rate));
      }
      break;

      case TrainingAlgorithm::BrentMethod:
      {
         return(calculate_Brent_method_training_rate_evaluation(evaluation, training_direction, initial_training_rate));
      }
      break;

      default:
      {
         std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                   << "Vector<double> calculate_training_rate_evaluation(double, const Vector<double>&, double) method." << std::endl
                   << "Unknown training rate method." << std::endl;

         exit(1);
      }
   }
}
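
// Dispatch note: every branch above returns a two-element vector laid out as
// { training_rate, evaluation }, and callers index it accordingly. The Fixed
// method ignores the current evaluation and the initial training rate, while
// the golden section and Brent methods use them to bracket a minimum along
// the training direction before refining it.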

/// Returns a two-element vector with a training rate that brackets a minimum along the training direction,
/// together with its evaluation. The training rate is grown by the bracketing factor until the evaluation
/// stops improving on the current one.

Vector<double> TrainingAlgorithm::calculate_bracketing_training_rate_evaluation
(double evaluation, const Vector<double>& training_direction, double initial_training_rate)
{
   // Multilayer perceptron stuff

   MultilayerPerceptron* multilayer_perceptron_pointer = objective_functional_pointer->get_multilayer_perceptron_pointer();

   int parameters_number = multilayer_perceptron_pointer->get_parameters_number();

   Vector<double> parameters = multilayer_perceptron_pointer->get_parameters();

   Vector<double> potential_parameters(parameters_number);
   Vector<double> parameters_increment(parameters_number);

   // Start with the initial training rate

   double bracketing_training_rate = initial_training_rate;

   parameters_increment = training_direction*bracketing_training_rate;

   potential_parameters = parameters + parameters_increment;

   double bracketing_evaluation = objective_functional_pointer->calculate_potential_evaluation(potential_parameters);

   // Grow the training rate while the step keeps improving on the current evaluation

   while(evaluation > bracketing_evaluation)
   {
      bracketing_training_rate *= bracketing_factor;

      if(bracketing_training_rate >= error_training_rate)
      {
         std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                   << "Vector<double> calculate_bracketing_training_rate_evaluation(double, const Vector<double>&, double) method." << std::endl
                   << "Unable to bracket a minimum." << std::endl;

         std::cerr << "Evaluation: " << evaluation << std::endl
                   << "Training direction norm: " << training_direction.calculate_norm() << std::endl
                   << "Initial training rate: " << initial_training_rate << std::endl;

         exit(1);
      }
      else if(display && bracketing_training_rate >= warning_training_rate)
      {
         std::cout << "Flood Warning: Training rate is " << bracketing_training_rate << std::endl;
      }

      parameters_increment = training_direction*bracketing_training_rate;

      potential_parameters = parameters + parameters_increment;

      bracketing_evaluation = objective_functional_pointer->calculate_potential_evaluation(potential_parameters);
   }

   // Return the bracketing training rate and its evaluation

   Vector<double> bracketing_training_rate_evaluation(2);
   bracketing_training_rate_evaluation[0] = bracketing_training_rate;
   bracketing_training_rate_evaluation[1] = bracketing_evaluation;

   return(bracketing_training_rate_evaluation);
}
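
// Bracketing sketch, assuming the default bracketing_factor of 1.5 and an
// initial training rate of 0.01: the rates tried are 0.01, 0.015, 0.0225, ...
// and the loop exits at the first rate whose evaluation is no better than the
// starting one, so a minimum along the direction is expected inside [0, rate].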

/// Returns a two-element vector with the fixed training rate (the first training rate member) and the
/// evaluation of the objective functional at the point obtained by moving that far along the training
/// direction.

Vector<double> TrainingAlgorithm::calculate_fixed_training_rate_evaluation
(double, const Vector<double>& training_direction, double)
{
   Vector<double> training_rate_evaluation(2);

   MultilayerPerceptron* multilayer_perceptron_pointer
   = objective_functional_pointer->get_multilayer_perceptron_pointer();

   Vector<double> parameters = multilayer_perceptron_pointer->get_parameters();

   Vector<double> parameters_increment = training_direction*first_training_rate;

   Vector<double> potential_parameters = parameters + parameters_increment;

   double evaluation = objective_functional_pointer->calculate_potential_evaluation(potential_parameters);

   training_rate_evaluation[0] = first_training_rate;
   training_rate_evaluation[1] = evaluation;

   return(training_rate_evaluation);
}

/// Returns a two-element vector with the training rate found by the golden section method along the given
/// training direction, together with the corresponding evaluation of the objective functional.

Vector<double> TrainingAlgorithm::calculate_golden_section_training_rate_evaluation
(double evaluation, const Vector<double>& training_direction, double initial_training_rate)
{
   Vector<double> training_rate_evaluation(2);

   // Multilayer perceptron stuff

   MultilayerPerceptron* multilayer_perceptron_pointer = objective_functional_pointer->get_multilayer_perceptron_pointer();

   int parameters_number = multilayer_perceptron_pointer->get_parameters_number();

   Vector<double> parameters = multilayer_perceptron_pointer->get_parameters();

   Vector<double> potential_parameters(parameters_number);
   Vector<double> parameters_increment(parameters_number);

   // Golden section ratio

   const double tau = (3.0-sqrt(5.0))/2.0;

   // Left point of the bracket

   double a = 0.0;
   double evaluation_a = evaluation;

   // Right point of the bracket

   Vector<double> b_and_evaluation
   = calculate_bracketing_training_rate_evaluation(evaluation, training_direction, initial_training_rate);

   double b = b_and_evaluation[0];
   double evaluation_b = b_and_evaluation[1];

   // Interior points

   double c = a + tau*(b-a);

   parameters_increment = training_direction*c;

   potential_parameters = parameters + parameters_increment;

   double evaluation_c = objective_functional_pointer->calculate_potential_evaluation(potential_parameters);

   double d = b - tau*(b-a);

   parameters_increment = training_direction*d;

   potential_parameters = parameters + parameters_increment;

   double evaluation_d = objective_functional_pointer->calculate_potential_evaluation(potential_parameters);

   // Reduce the interval until it is smaller than the training rate tolerance

   while(b-a > training_rate_tolerance)
   {
      Vector<double> evaluation_vector_left(3);
      evaluation_vector_left[0] = evaluation_a;
      evaluation_vector_left[1] = evaluation_c;
      evaluation_vector_left[2] = evaluation_d;

      double minimum_evaluation_left = evaluation_vector_left.calculate_minimum();

      Vector<double> evaluation_vector_right(3);
      evaluation_vector_right[0] = evaluation_b;
      evaluation_vector_right[1] = evaluation_c;
      evaluation_vector_right[2] = evaluation_d;

      double minimum_evaluation_right = evaluation_vector_right.calculate_minimum();

      if((evaluation_c <= evaluation_d && evaluation_b >= minimum_evaluation_left)
      || (evaluation_a <= minimum_evaluation_right))
      {
         // The minimum lies in the left part of the interval

         b = d;
         d = c;

         evaluation_b = evaluation_d;
         evaluation_d = evaluation_c;

         c = a + tau*(b-a);

         parameters_increment = training_direction*c;

         potential_parameters = parameters + parameters_increment;

         evaluation_c = objective_functional_pointer->calculate_potential_evaluation(potential_parameters);
      }
      else if((evaluation_d <= evaluation_c && evaluation_a >= minimum_evaluation_right)
      || (evaluation_b <= minimum_evaluation_left))
      {
         // The minimum lies in the right part of the interval

         a = c;
         c = d;

         evaluation_a = evaluation_c;
         evaluation_c = evaluation_d;

         d = b - tau*(b-a);

         parameters_increment = training_direction*d;

         potential_parameters = parameters + parameters_increment;

         evaluation_d = objective_functional_pointer->calculate_potential_evaluation(potential_parameters);
      }
      else
      {
         std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                   << "Vector<double> calculate_golden_section_training_rate_evaluation(double, const Vector<double>&, double) method." << std::endl
                   << "Unable to find where the minimum is." << std::endl;

         exit(1);
      }
   }

   // Pick the best of the four points

   double minimum_evaluation = evaluation;
   double training_rate = 0.0;

   if(evaluation_a < minimum_evaluation)
   {
      minimum_evaluation = evaluation_a;
      training_rate = a;
   }
   else if(evaluation_b < minimum_evaluation)
   {
      minimum_evaluation = evaluation_b;
      training_rate = b;
   }
   else if(evaluation_c < minimum_evaluation)
   {
      minimum_evaluation = evaluation_c;
      training_rate = c;
   }
   else if(evaluation_d < minimum_evaluation)
   {
      minimum_evaluation = evaluation_d;
      training_rate = d;
   }

   training_rate_evaluation[0] = training_rate;
   training_rate_evaluation[1] = minimum_evaluation;

   return(training_rate_evaluation);
}
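
// Golden section note: tau = (3 - sqrt(5))/2, approximately 0.382, places the
// interior points c = a + tau*(b-a) and d = b - tau*(b-a) so that after
// discarding one end of [a, b] the surviving interior point can be reused,
// and the interval shrinks by the constant factor 1 - tau, approximately
// 0.618, per iteration. Reaching a width of training_rate_tolerance from an
// initial width W therefore takes about log(tolerance/W)/log(0.618) steps.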

/// Returns a two-element vector with the training rate found by Brent's method along the given training
/// direction, together with the corresponding evaluation of the objective functional. Parabolic
/// interpolation steps are used where possible, with golden section steps as fallback.

Vector<double> TrainingAlgorithm::calculate_Brent_method_training_rate_evaluation
(double evaluation, const Vector<double>& training_direction, double initial_training_rate)
{
   Vector<double> training_rate_evaluation(2);

   // Multilayer perceptron stuff

   MultilayerPerceptron* multilayer_perceptron_pointer
   = objective_functional_pointer->get_multilayer_perceptron_pointer();

   int parameters_number = multilayer_perceptron_pointer->get_parameters_number();

   Vector<double> parameters = multilayer_perceptron_pointer->get_parameters();

   Vector<double> potential_parameters(parameters_number);

   Vector<double> parameters_increment(parameters_number);

   // Golden section ratio

   const double tau = (3.0-sqrt(5.0))/2.0;

   // Left point of the bracket

   double a = 0.0;
   double evaluation_a = evaluation;

   // Right point of the bracket

   Vector<double> b_and_evaluation
   = calculate_bracketing_training_rate_evaluation(evaluation, training_direction, initial_training_rate);

   double b = b_and_evaluation[0];
   double evaluation_b = b_and_evaluation[1];

   double u = 0.0;
   double evaluation_u = evaluation;
   double v = 0.0;
   double evaluation_v = evaluation;
   double w = 0.0;
   double evaluation_w = evaluation;
   double x = 0.0;
   double evaluation_x = evaluation;

   // First interior point

   v = a + tau*(b-a);

   parameters_increment = training_direction*v;

   potential_parameters = parameters + parameters_increment;

   evaluation_v = objective_functional_pointer->calculate_potential_evaluation(potential_parameters);

   w = v;
   evaluation_w = evaluation_v;

   x = v;
   evaluation_x = evaluation_v;

   bool golden_section = false;

   // Reduce the interval until it is smaller than the training rate tolerance

   while(b-a > training_rate_tolerance)
   {
      // Try a parabolic interpolation step whenever the three points are distinct

      if(w != x && w != v && x != v)
      {
         // Sort the three training rates in ascending order

         Vector<double> training_rate_vector(3);
         training_rate_vector[0] = v;
         training_rate_vector[1] = w;
         training_rate_vector[2] = x;

         std::sort(training_rate_vector.begin(), training_rate_vector.end(), std::less<double>());

         // Pair each sorted training rate with its evaluation

         Vector<double> evaluation_vector(3);

         for(int i = 0; i < 3; i++)
         {
            if(training_rate_vector[i] == v)
            {
               evaluation_vector[i] = evaluation_v;
            }
            else if(training_rate_vector[i] == w)
            {
               evaluation_vector[i] = evaluation_w;
            }
            else if(training_rate_vector[i] == x)
            {
               evaluation_vector[i] = evaluation_x;
            }
            else
            {
               std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                         << "Vector<double> calculate_Brent_method_training_rate_evaluation(double, const Vector<double>&, double) method." << std::endl
                         << "Unable to construct the training rate and evaluation vectors." << std::endl;

               exit(1);
            }
         }

         // Vertex of the parabola through the three points

         double numerator
         = (pow(training_rate_vector[2],2) - pow(training_rate_vector[1],2))*evaluation_vector[0]
         + (pow(training_rate_vector[1],2) - pow(training_rate_vector[0],2))*evaluation_vector[2]
         + (pow(training_rate_vector[0],2) - pow(training_rate_vector[2],2))*evaluation_vector[1];

         double denominator
         = (training_rate_vector[2] - training_rate_vector[1])*evaluation_vector[0]
         + (training_rate_vector[1] - training_rate_vector[0])*evaluation_vector[2]
         + (training_rate_vector[0] - training_rate_vector[2])*evaluation_vector[1];

         double xStar = 0.5*numerator/denominator;

         if(xStar < b && a < xStar)
         {
            // Accept the parabolic step

            u = xStar;

            golden_section = false;
         }
         else
         {
            // The vertex falls outside the bracket: fall back to golden section

            golden_section = true;
         }
      }
      else
      {
         golden_section = true;
      }

      // Golden section step

      if(golden_section)
      {
         if(x >= (a+b)/2.0)
         {
            u = x - tau*(x-a);
         }
         else
         {
            u = x + tau*(b-x);
         }
      }

      // Evaluate the new point

      parameters_increment = training_direction*u;

      potential_parameters = parameters + parameters_increment;

      evaluation_u = objective_functional_pointer->calculate_potential_evaluation(potential_parameters);

      // Update the bracket and the three best points

      if(evaluation_u <= evaluation_x)
      {
         if(u < x)
         {
            b = x;
            evaluation_b = evaluation_x;
         }
         else
         {
            a = x;
            evaluation_a = evaluation_x;
         }

         v = w;
         evaluation_v = evaluation_w;

         w = x;
         evaluation_w = evaluation_x;

         x = u;
         evaluation_x = evaluation_u;
      }
      else
      {
         if(u < x)
         {
            a = u;
            evaluation_a = evaluation_u;
         }
         else
         {
            b = u;
            evaluation_b = evaluation_u;
         }

         if((evaluation_u <= evaluation_w) || (w == x))
         {
            v = w;
            evaluation_v = evaluation_w;

            w = u;
            evaluation_w = evaluation_u;
         }
         else if((evaluation_u <= evaluation_v) || (v == x) || (v == w))
         {
            v = u;
            evaluation_v = evaluation_u;
         }
      }
   }

   // Pick the best of the tracked points

   double minimum_evaluation = evaluation;
   double training_rate = 0.0;

   if(evaluation_a < minimum_evaluation)
   {
      minimum_evaluation = evaluation_a;
      training_rate = a;
   }
   else if(evaluation_b < minimum_evaluation)
   {
      minimum_evaluation = evaluation_b;
      training_rate = b;
   }
   else if(evaluation_u < minimum_evaluation)
   {
      minimum_evaluation = evaluation_u;
      training_rate = u;
   }
   else if(evaluation_v < minimum_evaluation)
   {
      minimum_evaluation = evaluation_v;
      training_rate = v;
   }
   else if(evaluation_w < minimum_evaluation)
   {
      minimum_evaluation = evaluation_w;
      training_rate = w;
   }
   else if(evaluation_x < minimum_evaluation)
   {
      minimum_evaluation = evaluation_x;
      training_rate = x;
   }

   training_rate_evaluation[0] = training_rate;
   training_rate_evaluation[1] = minimum_evaluation;

   return(training_rate_evaluation);
}
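
// Parabolic step note: given three points (x0,f0), (x1,f1), (x2,f2), the
// vertex of the interpolating parabola computed in the loop above is
//
//    x* = 0.5 * [ (x2^2 - x1^2)*f0 + (x1^2 - x0^2)*f2 + (x0^2 - x2^2)*f1 ]
//             / [ (x2 - x1)*f0 + (x1 - x0)*f2 + (x0 - x2)*f1 ],
//
// which matches the numerator and denominator above. When x* falls outside
// (a, b), the algorithm falls back to a plain golden section step instead.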

/// Returns a string with the members of the training algorithm object in XML-type format.
/// @param show_declaration True if the XML declaration line is to be included, false otherwise.

std::string TrainingAlgorithm::to_XML(bool show_declaration)
{
   std::stringstream buffer;

   // Declaration

   if(show_declaration)
   {
      buffer << "<Flood version='3.0' class='TrainingAlgorithm'>" << std::endl;
   }

   // Training operators

   buffer << "<TrainingRateMethod>" << std::endl
          << get_training_rate_method_name() << std::endl
          << "</TrainingRateMethod>" << std::endl;

   // Training parameters

   buffer << "<BracketingFactor>" << std::endl
          << bracketing_factor << std::endl
          << "</BracketingFactor>" << std::endl;

   buffer << "<FirstTrainingRate>" << std::endl
          << first_training_rate << std::endl
          << "</FirstTrainingRate>" << std::endl;

   buffer << "<TrainingRateTolerance>" << std::endl
          << training_rate_tolerance << std::endl
          << "</TrainingRateTolerance>" << std::endl;

   buffer << "<WarningParametersNorm>" << std::endl
          << warning_parameters_norm << std::endl
          << "</WarningParametersNorm>" << std::endl;

   buffer << "<WarningGradientNorm>" << std::endl
          << warning_gradient_norm << std::endl
          << "</WarningGradientNorm>" << std::endl;

   buffer << "<WarningTrainingRate>" << std::endl
          << warning_training_rate << std::endl
          << "</WarningTrainingRate>" << std::endl;

   buffer << "<ErrorParametersNorm>" << std::endl
          << error_parameters_norm << std::endl
          << "</ErrorParametersNorm>" << std::endl;

   buffer << "<ErrorGradientNorm>" << std::endl
          << error_gradient_norm << std::endl
          << "</ErrorGradientNorm>" << std::endl;

   buffer << "<ErrorTrainingRate>" << std::endl
          << error_training_rate << std::endl
          << "</ErrorTrainingRate>" << std::endl;

   // Stopping criteria

   buffer << "<MinimumParametersIncrementNorm>" << std::endl
          << minimum_parameters_increment_norm << std::endl
          << "</MinimumParametersIncrementNorm>" << std::endl;

   buffer << "<MinimumEvaluationImprovement>" << std::endl
          << minimum_evaluation_improvement << std::endl
          << "</MinimumEvaluationImprovement>" << std::endl;

   buffer << "<EvaluationGoal>" << std::endl
          << evaluation_goal << std::endl
          << "</EvaluationGoal>" << std::endl;

   buffer << "<GradientNormGoal>" << std::endl
          << gradient_norm_goal << std::endl
          << "</GradientNormGoal>" << std::endl;

   buffer << "<MaximumEpochsNumber>" << std::endl
          << maximum_epochs_number << std::endl
          << "</MaximumEpochsNumber>" << std::endl;

   buffer << "<MaximumTime>" << std::endl
          << maximum_time << std::endl
          << "</MaximumTime>" << std::endl;

   // Early stopping

   buffer << "<EarlyStopping>" << std::endl
          << early_stopping << std::endl
          << "</EarlyStopping>" << std::endl;

   // Training history

   buffer << "<ReserveParametersHistory>" << std::endl
          << reserve_parameters_history << std::endl
          << "</ReserveParametersHistory>" << std::endl;

   buffer << "<ReserveParametersNormHistory>" << std::endl
          << reserve_parameters_norm_history << std::endl
          << "</ReserveParametersNormHistory>" << std::endl;

   buffer << "<ReserveEvaluationHistory>" << std::endl
          << reserve_evaluation_history << std::endl
          << "</ReserveEvaluationHistory>" << std::endl;

   buffer << "<ReserveGradientHistory>" << std::endl
          << reserve_gradient_history << std::endl
          << "</ReserveGradientHistory>" << std::endl;

   buffer << "<ReserveGradientNormHistory>" << std::endl
          << reserve_gradient_norm_history << std::endl
          << "</ReserveGradientNormHistory>" << std::endl;

   buffer << "<ReserveInverseHessianHistory>" << std::endl
          << reserve_inverse_Hessian_history << std::endl
          << "</ReserveInverseHessianHistory>" << std::endl;

   buffer << "<ReserveTrainingDirectionHistory>" << std::endl
          << reserve_training_direction_history << std::endl
          << "</ReserveTrainingDirectionHistory>" << std::endl;

   buffer << "<ReserveTrainingRateHistory>" << std::endl
          << reserve_training_rate_history << std::endl
          << "</ReserveTrainingRateHistory>" << std::endl;

   buffer << "<ReserveElapsedTimeHistory>" << std::endl
          << reserve_elapsed_time_history << std::endl
          << "</ReserveElapsedTimeHistory>" << std::endl;

   buffer << "<ReserveValidationErrorHistory>" << std::endl
          << reserve_validation_error_history << std::endl
          << "</ReserveValidationErrorHistory>" << std::endl;

   // User interface

   buffer << "<Display>" << std::endl
          << display << std::endl
          << "</Display>" << std::endl;

   buffer << "<DisplayPeriod>" << std::endl
          << display_period << std::endl
          << "</DisplayPeriod>" << std::endl;

   return(buffer.str());
}
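
// Example of the document produced by to_XML(true) with the default settings
// from set_default() (abridged; each value is written on its own line):
//
//    <Flood version='3.0' class='TrainingAlgorithm'>
//    <TrainingRateMethod>
//    BrentMethod
//    </TrainingRateMethod>
//    <BracketingFactor>
//    1.5
//    </BracketingFactor>
//    ...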

/// Prints the XML-type string representation of the training algorithm object to the standard output.

void TrainingAlgorithm::print(void)
{
   std::cout << to_XML(true);
}


/// Saves the XML-type string representation of the training algorithm object to a file.
/// @param filename Name of the training algorithm XML-type file.

void TrainingAlgorithm::save(const char* filename)
{
   std::fstream file;

   file.open(filename, std::ios::out);

   if(!file.is_open())
   {
      std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                << "void save(const char*) method." << std::endl
                << "Cannot open training algorithm XML-type file." << std::endl;

      exit(1);
   }

   file << to_XML(true);

   file.close();
}
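
// Round-trip sketch (hypothetical file name; an object of any concrete
// subclass can be saved and reloaded the same way through this base class):
//
//    training_algorithm.save("training_algorithm.xml");
//    training_algorithm.load("training_algorithm.xml");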

/// Loads the members of the training algorithm object from an XML-type file.
/// @param filename Name of the training algorithm XML-type file.

void TrainingAlgorithm::load(const char* filename)
{
   std::fstream file;

   file.open(filename, std::ios::in);

   if(!file.is_open())
   {
      std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                << "void load(const char*) method." << std::endl
                << "Cannot open training algorithm object XML-type file." << std::endl;

      exit(1);
   }

   std::string word;
   std::string line;

   // Declaration

   getline(file, line);

   if(line != "<Flood version='3.0' class='TrainingAlgorithm'>")
   {
      // Note: the declaration line is currently not enforced.
   }
02177
02178 while(!file.eof())
02179 {
02180 file >> word;
02181
02182
02183
02184 if(word == "<TrainingRateMethod>")
02185 {
02186 std::string new_training_rate_method;
02187
02188 file >> new_training_rate_method;
02189
02190 file >> word;
02191
02192 if(word != "</TrainingRateMethod>")
02193 {
02194 std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
02195 << "void load(const char*) method." << std::endl
02196 << "Unknown training rate method end tag: " << word << std::endl;
02197
02198 exit(1);
02199 }
02200
02201 set_training_rate_method(new_training_rate_method);
02202 }
02203
02204
02205
02206 else if(word == "<BracketingFactor>")
02207 {
02208 double new_backeting_factor;
02209
02210 file >> new_backeting_factor;
02211
02212 file >> word;
02213
02214 if(word != "</BracketingFactor>")
02215 {
02216 std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
02217 << "void load(const char*) method." << std::endl
02218 << "Unknown bracketing factor end tag: " << word << std::endl;
02219
02220 exit(1);
02221 }
02222
02223 set_bracketing_factor(new_backeting_factor);
02224 }
02225 else if(word == "<FirstTrainingRate>")
02226 {
02227 double new_first_training_rate;
02228
02229 file >> new_first_training_rate;
02230
02231 file >> word;
02232
02233 if(word != "</FirstTrainingRate>")
02234 {
02235 std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
02236 << "void load(const char*) method." << std::endl
02237 << "Unknown first training rate end tag: " << word << std::endl;
02238
02239 exit(1);
02240 }
02241
02242 set_first_training_rate(new_first_training_rate);
02243 }
02244 else if(word == "<TrainingRateTolerance>")
02245 {
02246 double new_training_rate_tolerance;
02247
02248 file >> new_training_rate_tolerance;
02249
02250 file >> word;
02251
02252 if(word != "</TrainingRateTolerance>")
02253 {
02254 std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
02255 << "void load(const char*) method." << std::endl
02256 << "Unknown training rate tolerance end tag: " << word << std::endl;
02257
02258 exit(1);
02259 }
02260
02261 set_training_rate_tolerance(new_training_rate_tolerance);
02262 }
02263
02264 else if(word == "<WarningParametersNorm>")
02265 {
02266 double new_warning_parameters_norm;
02267
02268 file >> new_warning_parameters_norm;
02269
02270 file >> word;
02271
02272 if(word != "</WarningParametersNorm>")
02273 {
02274 std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
02275 << "void load(const char*) method." << std::endl
02276 << "Unknown warning parameters norm end tag: " << word << std::endl;
02277
02278 exit(1);
02279 }
02280
02281 set_warning_parameters_norm(new_warning_parameters_norm);
02282 }
02283
02284 else if(word == "<WarningGradientNorm>")
02285 {
02286 double new_warning_gradient_norm;
02287
02288 file >> new_warning_gradient_norm;
02289
02290 file >> word;
02291
02292 if(word != "</WarningGradientNorm>")
02293 {
02294 std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
02295 << "void load(const char*) method." << std::endl
02296 << "Unknown warning gradient norm end tag: " << word << std::endl;
02297
02298 exit(1);
02299 }
02300
02301 set_warning_gradient_norm(new_warning_gradient_norm);
02302 }
02303
02304 else if(word == "<WarningTrainingRate>")
02305 {
02306 double new_warning_training_rate;
02307
02308 file >> new_warning_training_rate;
02309
02310 file >> word;
02311
02312 if(word != "</WarningTrainingRate>")
02313 {
02314 std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
02315 << "void load(const char*) method." << std::endl
02316 << "Unknown warning training rate end tag: " << word << std::endl;
02317
02318 exit(1);
02319 }
02320
02321 set_warning_training_rate(new_warning_training_rate);
02322 }
02323
02324 else if(word == "<ErrorParametersNorm>")
02325 {
02326 double new_error_parameters_norm;
02327
02328 file >> new_error_parameters_norm;
02329
02330 file >> word;
02331
02332 if(word != "</ErrorParametersNorm>")
02333 {
02334 std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
02335 << "void load(const char*) method." << std::endl
02336 << "Unknown error parameters norm end tag: " << word << std::endl;
02337
02338 exit(1);
02339 }
02340
02341 set_error_parameters_norm(new_error_parameters_norm);
02342 }
02343
02344 else if(word == "<ErrorGradientNorm>")
02345 {
02346 double new_error_gradient_norm;
02347
02348 file >> new_error_gradient_norm;
02349
02350 file >> word;
02351
02352 if(word != "</ErrorGradientNorm>")
02353 {
02354 std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
02355 << "void load(const char*) method." << std::endl
02356 << "Unknown error gradient norm end tag: " << word << std::endl;
02357
02358 exit(1);
02359 }
02360
02361 set_error_gradient_norm(new_error_gradient_norm);
02362 }
02363
02364 else if(word == "<ErrorTrainingRate>")
02365 {
02366 double new_error_training_rate;
02367
02368 file >> new_error_training_rate;
02369
02370 set_error_training_rate(new_error_training_rate);
02371
02372 file >> word;
02373
02374 if(word != "</ErrorTrainingRate>")
02375 {
02376 std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
02377 << "void load(const char*) method." << std::endl
02378 << "Unknown error training rate end tag: " << word << std::endl;
02379
02380 exit(1);
02381 }
02382 }
02383
02384
02385
02386 else if(word == "<MinimumParametersIncrementNorm>")
02387 {
02388 double new_minimum_parameters_increment_norm;
02389
02390 file >> new_minimum_parameters_increment_norm;
02391
02392 file >> word;
02393
02394 if(word != "</MinimumParametersIncrementNorm>")
02395 {
02396 std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
02397 << "void load(const char*) method." << std::endl
02398 << "Unknown minimum parameters increment norm end tag: " << word << std::endl;
02399
02400 exit(1);
02401 }
02402
02403 set_minimum_parameters_increment_norm(new_minimum_parameters_increment_norm);
02404 }
02405
02406 else if(word == "<MinimumEvaluationImprovement>")
02407 {
02408 double new_minimum_evaluation_improvement;
02409
02410 file >> new_minimum_evaluation_improvement;
02411
02412 file >> word;
02413
02414 if(word != "</MinimumEvaluationImprovement>")
02415 {
02416 std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
02417 << "void load(const char*) method." << std::endl
02418 << "Unknown minimum evaluation improvement end tag: " << word << std::endl;
02419
02420 exit(1);
02421 }
02422
02423 set_minimum_evaluation_improvement(new_minimum_evaluation_improvement);
02424 }
02425 else if(word == "<EvaluationGoal>")
02426 {
02427 double new_evaluation_goal;
02428
02429 file >> new_evaluation_goal;
02430
02431 file >> word;
02432
02433 if(word != "</EvaluationGoal>")
02434 {
02435 std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
02436 << "void load(const char*) method." << std::endl
02437 << "Unknown evaluation goal end tag: " << word << std::endl;
02438
02439 exit(1);
02440 }
02441
02442 set_evaluation_goal(new_evaluation_goal);
02443 }
02444 else if(word == "<GradientNormGoal>")
02445 {
02446 double new_gradient_norm_goal;
02447
02448 file >> new_gradient_norm_goal;
02449
02450 file >> word;
02451
02452 if(word != "</GradientNormGoal>")
02453 {
02454 std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
02455 << "void load(const char*) method." << std::endl
02456 << "Unknown gradient norm goal end tag: " << word << std::endl;
02457
02458 exit(1);
02459 }
02460
02461 set_gradient_norm_goal(new_gradient_norm_goal);
02462 }
02463 else if(word == "<MaximumEpochsNumber>")
02464 {
02465 int new_maximum_epochs_number;
02466
02467 file >> new_maximum_epochs_number;
02468
02469 file >> word;
02470
02471 if(word != "</MaximumEpochsNumber>")
02472 {
02473 std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
02474 << "void load(const char*) method." << std::endl
02475 << "Unknown maximum epochs number end tag: " << word << std::endl;
02476
02477 exit(1);
02478 }
02479
02480 set_maximum_epochs_number(new_maximum_epochs_number);
02481 }
02482 else if(word == "<MaximumTime>")
02483 {
02484 double new_maximum_time;
02485
02486 file >> new_maximum_time;
02487
02488 file >> word;
02489
02490 if(word != "</MaximumTime>")
02491 {
02492 std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
02493 << "void load(const char*) method." << std::endl
02494 << "Unknown maximum time end tag: " << word << std::endl;
02495
02496 exit(1);
02497 }
02498
02499 set_maximum_time(new_maximum_time);
02500 }
02501 else if(word == "<EarlyStopping>")
02502 {
02503 bool new_early_stopping;
02504
02505 file >> new_early_stopping;
02506
02507 file >> word;
02508
02509 if(word != "</EarlyStopping>")
02510 {
02511 std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
02512 << "void load(const char*) method." << std::endl
02513 << "Unknown early stopping end tag: " << word << std::endl;
02514
02515 exit(1);
02516 }
02517
02518 set_early_stopping(new_early_stopping);
02519 }
02520
02521
02522
02523 else if(word == "<ReserveParametersHistory>")
02524 {
02525 bool new_reserve_parameters_history;
02526
02527 file >> new_reserve_parameters_history;
02528
02529 file >> word;
02530
02531 if(word != "</ReserveParametersHistory>")
02532 {
02533 std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
02534 << "void load(const char*) method." << std::endl
02535 << "Unknown reserve parameters history end tag: " << word << std::endl;
02536
02537 exit(1);
02538 }
02539
02540 set_reserve_parameters_history(new_reserve_parameters_history);
02541 }
02542
02543 else if(word == "<ReserveParametersNormHistory>")
02544 {
02545 bool new_reserve_parameters_norm_history;
02546
02547 file >> new_reserve_parameters_norm_history;
02548
02549 file >> word;
02550
02551 if(word != "</ReserveParametersNormHistory>")
02552 {
02553 std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
02554 << "void load(const char*) method." << std::endl
02555 << "Unknown reserve parameters norm history end tag: " << word << std::endl;
02556
02557 exit(1);
02558 }
02559
02560 set_reserve_parameters_norm_history(new_reserve_parameters_norm_history);
02561 }
02562 else if(word == "<ReserveEvaluationHistory>")
02563 {
02564 bool new_reserve_evaluation_history;
02565
02566 file >> new_reserve_evaluation_history;
02567
02568 file >> word;
02569
02570 if(word != "</ReserveEvaluationHistory>")
02571 {
02572 std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
02573 << "void load(const char*) method." << std::endl
02574 << "Unknown reserve evaluation history end tag: " << word << std::endl;
02575
02576 exit(1);
02577 }
02578
02579 set_reserve_evaluation_history(new_reserve_evaluation_history);
02580 }
02581
02582 else if(word == "<ReserveGradientHistory>")
02583 {
02584 bool new_reserve_gradient_history;
02585
02586 file >> new_reserve_gradient_history;
02587
02588 file >> word;
02589
02590 if(word != "</ReserveGradientHistory>")
02591 {
02592 std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
02593 << "void load(const char*) method." << std::endl
02594 << "Unknown reserve gradient history end tag: " << word << std::endl;
02595
02596 exit(1);
02597 }
02598
02599 set_reserve_gradient_history(new_reserve_gradient_history);
02600 }
      else if(word == "<ReserveGradientNormHistory>")
      {
         bool new_reserve_gradient_norm_history;

         file >> new_reserve_gradient_norm_history;

         file >> word;

         if(word != "</ReserveGradientNormHistory>")
         {
            std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                      << "void load(const char*) method." << std::endl
                      << "Unknown reserve gradient norm history end tag: " << word << std::endl;

            exit(1);
         }

         set_reserve_gradient_norm_history(new_reserve_gradient_norm_history);
      }
      else if(word == "<ReserveInverseHessianHistory>")
      {
         bool new_reserve_inverse_Hessian_history;

         file >> new_reserve_inverse_Hessian_history;

         file >> word;

         if(word != "</ReserveInverseHessianHistory>")
         {
            std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                      << "void load(const char*) method." << std::endl
                      << "Unknown reserve inverse Hessian history end tag: " << word << std::endl;

            exit(1);
         }

         set_reserve_inverse_Hessian_history(new_reserve_inverse_Hessian_history);
      }
      else if(word == "<ReserveTrainingDirectionHistory>")
      {
         bool new_reserve_training_direction_history;

         file >> new_reserve_training_direction_history;

         file >> word;

         if(word != "</ReserveTrainingDirectionHistory>")
         {
            std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                      << "void load(const char*) method." << std::endl
                      << "Unknown reserve training direction history end tag: " << word << std::endl;

            exit(1);
         }

         set_reserve_training_direction_history(new_reserve_training_direction_history);
      }
      else if(word == "<ReserveTrainingRateHistory>")
      {
         bool new_reserve_training_rate_history;

         file >> new_reserve_training_rate_history;

         file >> word;

         if(word != "</ReserveTrainingRateHistory>")
         {
            std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                      << "void load(const char*) method." << std::endl
                      << "Unknown reserve training rate history end tag: " << word << std::endl;

            exit(1);
         }

         set_reserve_training_rate_history(new_reserve_training_rate_history);
      }
      else if(word == "<ReserveElapsedTimeHistory>")
      {
         bool new_reserve_elapsed_time_history;

         file >> new_reserve_elapsed_time_history;

         file >> word;

         if(word != "</ReserveElapsedTimeHistory>")
         {
            std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                      << "void load(const char*) method." << std::endl
                      << "Unknown reserve elapsed time history end tag: " << word << std::endl;

            exit(1);
         }

         set_reserve_elapsed_time_history(new_reserve_elapsed_time_history);
      }
      else if(word == "<ReserveValidationErrorHistory>")
      {
         bool new_reserve_validation_error_history;

         file >> new_reserve_validation_error_history;

         file >> word;

         if(word != "</ReserveValidationErrorHistory>")
         {
            std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                      << "void load(const char*) method." << std::endl
                      << "Unknown reserve validation error history end tag: " << word << std::endl;

            exit(1);
         }

         set_reserve_validation_error_history(new_reserve_validation_error_history);
      }
      else if(word == "<DisplayPeriod>")
      {
         int new_display_period;

         file >> new_display_period;

         file >> word;

         if(word != "</DisplayPeriod>")
         {
            std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                      << "void load(const char*) method." << std::endl
                      << "Unknown display period end tag: " << word << std::endl;

            exit(1);
         }

         set_display_period(new_display_period);
      }
      else if(word == "<Display>")
      {
         bool new_display;

         file >> new_display;

         file >> word;

         if(word != "</Display>")
         {
            std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                      << "void load(const char*) method." << std::endl
                      << "Unknown display end tag: " << word << std::endl;

            exit(1);
         }

         set_display(new_display);
      }
      else
      {
         // Unknown tag: ignore it and keep reading the next word
      }
   }

   // Close file

   file.close();
}
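// A minimal sketch of the tag format that load(const char*) accepts for the
// members handled above; the values shown are illustrative, not library
// defaults:
//
//   <MaximumTime> 3600.0 </MaximumTime>
//   <EarlyStopping> 1 </EarlyStopping>
//   <ReserveEvaluationHistory> 1 </ReserveEvaluationHistory>
//   <DisplayPeriod> 10 </DisplayPeriod>
//   <Display> 1 </Display>
//
// Every value is read with operator>>, so booleans must be written as 0 or 1,
// and each closing tag must be separated from its value by whitespace.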


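/// This method sets all the reserve flags of the training history (parameters,
/// parameters norm, evaluation, gradient, gradient norm, inverse Hessian,
/// validation error, training direction, training rate and elapsed time) to a
/// single value.
/// @param new_reserve_all_training_history True if all training history
/// variables are to be reserved in memory, false otherwise.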
void TrainingAlgorithm::set_reserve_all_training_history(bool new_reserve_all_training_history)
{
   reserve_parameters_history = new_reserve_all_training_history;
   reserve_parameters_norm_history = new_reserve_all_training_history;

   reserve_evaluation_history = new_reserve_all_training_history;
   reserve_gradient_history = new_reserve_all_training_history;
   reserve_gradient_norm_history = new_reserve_all_training_history;
   reserve_inverse_Hessian_history = new_reserve_all_training_history;

   reserve_validation_error_history = new_reserve_all_training_history;

   reserve_training_direction_history = new_reserve_all_training_history;
   reserve_training_rate_history = new_reserve_all_training_history;
   reserve_elapsed_time_history = new_reserve_all_training_history;
}


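/// This method resizes those training history vectors whose reserve flag is
/// set to true; histories that are not being reserved keep their current size.
/// @param new_size New size of the reserved training history vectors.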
void TrainingAlgorithm::resize_training_history(int new_size)
{
   // Parameters

   if(reserve_parameters_history)
   {
      parameters_history.resize(new_size);
   }

   if(reserve_parameters_norm_history)
   {
      parameters_norm_history.resize(new_size);
   }

   // Evaluation, gradient and validation error

   if(reserve_evaluation_history)
   {
      evaluation_history.resize(new_size);
   }

   if(reserve_gradient_history)
   {
      gradient_history.resize(new_size);
   }

   if(reserve_gradient_norm_history)
   {
      gradient_norm_history.resize(new_size);
   }

   if(reserve_inverse_Hessian_history)
   {
      inverse_Hessian_history.resize(new_size);
   }

   if(reserve_validation_error_history)
   {
      validation_error_history.resize(new_size);
   }

   // Training direction, training rate and elapsed time

   if(reserve_training_direction_history)
   {
      training_direction_history.resize(new_size);
   }

   if(reserve_training_rate_history)
   {
      training_rate_history.resize(new_size);
   }

   if(reserve_elapsed_time_history)
   {
      elapsed_time_history.resize(new_size);
   }
}
02868
02869
02870
02871
02875
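/// This method returns a string with the reserved training history variables
/// in XML-type format, one element per history vector. Note that the buffer
/// is a std::stringstream, so this translation unit needs the <sstream>
/// header.
/// @param show_declaration True if the XML declaration line is to be included,
/// false otherwise.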
std::string TrainingAlgorithm::get_training_history_XML(bool show_declaration)
{
   std::stringstream buffer;

   // Declaration

   if(show_declaration)
   {
      buffer << "<Flood version='3.0' class='TrainingAlgorithm' content='TrainingHistory'>" << std::endl;
   }

   // Parameters

   if(reserve_parameters_history)
   {
      buffer << "<ParametersHistory>" << std::endl
             << parameters_history << std::endl
             << "</ParametersHistory>" << std::endl;
   }

   if(reserve_parameters_norm_history)
   {
      buffer << "<ParametersNormHistory>" << std::endl
             << parameters_norm_history << std::endl
             << "</ParametersNormHistory>" << std::endl;
   }

   // Evaluation, gradient and inverse Hessian

   if(reserve_evaluation_history)
   {
      buffer << "<EvaluationHistory>" << std::endl
             << evaluation_history << std::endl
             << "</EvaluationHistory>" << std::endl;
   }

   if(reserve_gradient_history)
   {
      buffer << "<GradientHistory>" << std::endl
             << gradient_history << std::endl
             << "</GradientHistory>" << std::endl;
   }

   if(reserve_gradient_norm_history)
   {
      buffer << "<GradientNormHistory>" << std::endl
             << gradient_norm_history << std::endl
             << "</GradientNormHistory>" << std::endl;
   }

   if(reserve_inverse_Hessian_history)
   {
      // Each epoch stores a full matrix, so write them one element at a time

      buffer << "<InverseHessianHistory>" << std::endl;

      int size = inverse_Hessian_history.get_size();

      for(int i = 0; i < size; i++)
      {
         buffer << "<InverseHessian>" << std::endl
                << inverse_Hessian_history[i] << std::endl
                << "</InverseHessian>" << std::endl;
      }

      buffer << "</InverseHessianHistory>" << std::endl;
   }

   // Training direction, training rate and elapsed time

   if(reserve_training_direction_history)
   {
      buffer << "<TrainingDirectionHistory>" << std::endl
             << training_direction_history << std::endl
             << "</TrainingDirectionHistory>" << std::endl;
   }

   if(reserve_training_rate_history)
   {
      buffer << "<TrainingRateHistory>" << std::endl
             << training_rate_history << std::endl
             << "</TrainingRateHistory>" << std::endl;
   }

   if(reserve_elapsed_time_history)
   {
      buffer << "<ElapsedTimeHistory>" << std::endl
             << elapsed_time_history << std::endl
             << "</ElapsedTimeHistory>" << std::endl;
   }

   if(reserve_validation_error_history)
   {
      buffer << "<ValidationErrorHistory>" << std::endl
             << validation_error_history << std::endl
             << "</ValidationErrorHistory>" << std::endl;
   }

   return(buffer.str());
}


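/// This method prints the reserved training history variables, in XML-type
/// format and including the declaration, to the standard output.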
void TrainingAlgorithm::print_training_history(void)
{
   std::cout << get_training_history_XML(true);
}


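/// This method saves the reserved training history variables, in XML-type
/// format, to a data file. If the file cannot be opened, an error message is
/// printed and the program exits.
/// @param filename Name of training history XML-type file.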
void TrainingAlgorithm::save_training_history(const char* filename)
{
   std::fstream file;

   // Open file

   file.open(filename, std::ios::out);

   if(!file.is_open())
   {
      std::cerr << "Flood Error: TrainingAlgorithm class." << std::endl
                << "void save_training_history(const char*) method." << std::endl
                << "Cannot open training history XML-type file." << std::endl;

      exit(1);
   }

   // Write training history

   file << get_training_history_XML(true);

   // Close file

   file.close();
}

}