00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016 #include "GradientDescent.h"
00017
00018 #include <string>
00019 #include <sstream>
00020 #include <iostream>
00021 #include <fstream>
00022 #include <algorithm>
00023 #include <functional>
00024 #include <limits>
00025 #include <cmath>
00026 #include <ctime>
00027
00028 namespace Flood
00029 {
00030
00031
00032
00066
/// Objective functional constructor.
/// Creates a gradient descent training algorithm object associated to the
/// given objective functional; all training parameters are set to their
/// defaults by the TrainingAlgorithm base-class constructor.
/// @param new_objective_functional_pointer Pointer to the objective functional to be minimized.
GradientDescent::GradientDescent(ObjectiveFunctional* new_objective_functional_pointer)
: TrainingAlgorithm(new_objective_functional_pointer)
{
}
00071
00072
00073
00074
00106
/// Default constructor.
/// Creates a gradient descent training algorithm object not associated to any
/// objective functional; an objective functional must be set before training.
GradientDescent::GradientDescent(void) : TrainingAlgorithm()
{
}
00110
00111
00112
00113
00115
/// Destructor.
/// This class owns no resources of its own, so nothing needs to be released here.
GradientDescent::~GradientDescent(void)
{
}
00119
00120
00121
00122
00123
00124
00138
00139 void GradientDescent::set_reserve_all_training_history(bool new_reserve_all_training_history)
00140 {
00141
00142
00143 reserve_parameters_history = new_reserve_all_training_history;
00144 reserve_parameters_norm_history = new_reserve_all_training_history;
00145
00146
00147
00148 reserve_evaluation_history = new_reserve_all_training_history;
00149 reserve_gradient_history = new_reserve_all_training_history;
00150 reserve_gradient_norm_history = new_reserve_all_training_history;
00151
00152 reserve_validation_error_history = new_reserve_all_training_history;
00153
00154
00155
00156 reserve_training_direction_history = new_reserve_all_training_history;
00157 reserve_training_rate_history = new_reserve_all_training_history;
00158
00159 reserve_elapsed_time_history = new_reserve_all_training_history;
00160 }
00161
00162
00163
00164
00165 Vector<double> GradientDescent::calculate_training_direction(const Vector<double>& gradient)
00166 {
00167 double gradient_norm = gradient.calculate_norm();
00168
00169 return(gradient*(-1.0)/gradient_norm);
00170 }
00171
00172
00173
00174
00175
00179
00180 void GradientDescent::train(void)
00181 {
00182
00183
00184 #ifdef _DEBUG
00185
00186 if(objective_functional_pointer == NULL)
00187 {
00188 std::cerr << "Flood Error: GradientDescent class." << std::endl
00189 << "void train(void) method." << std::endl
00190 << "Pointer to objective functional object cannot be NULL." << std::endl;
00191
00192 exit(1);
00193 }
00194
00195 #endif
00196
00197
00198
00199 if(display)
00200 {
00201 std::cout << "Training with gradient descent..." << std::endl;
00202 }
00203
00204
00205
00206 MultilayerPerceptron* multilayer_perceptron_pointer = objective_functional_pointer->get_multilayer_perceptron_pointer();
00207
00208 int parameters_number = multilayer_perceptron_pointer->get_parameters_number();
00209
00210 Vector<double> parameters(parameters_number);
00211 double parameters_norm;
00212
00213 Vector<double> parameters_increment(parameters_number);
00214 double parameters_increment_norm;
00215
00216
00217
00218 double validation_error = 0.0;
00219 double old_validation_error = 0.0;
00220 double validation_error_increment = 0.0;
00221
00222 double evaluation = 0.0;
00223 double old_evaluation = 0.0;
00224 double evaluation_improvement = 0.0;
00225
00226 Vector<double> gradient(parameters_number);
00227 double gradient_norm;
00228
00229
00230
00231 Vector<double> training_direction(parameters_number);
00232
00233 double initial_training_rate = 0.0;
00234 double training_rate = 0.0;
00235 double old_training_rate = 0.0;
00236
00237 Vector<double> training_rate_evaluation(2);
00238
00239 bool stop_training = false;
00240
00241 time_t beginning_time, current_time;
00242 time(&beginning_time);
00243 double elapsed_time;
00244
00245 resize_training_history(maximum_epochs_number+1);
00246
00247 for(int epoch = 0; epoch <= maximum_epochs_number; epoch++)
00248 {
00249
00250
00251 parameters = multilayer_perceptron_pointer->get_parameters();
00252
00253 parameters_norm = parameters.calculate_norm();
00254
00255 if(display && parameters_norm >= warning_parameters_norm)
00256 {
00257 std::cout << "Flood Warning: Parameters norm is " << parameters_norm << "." << std::endl;
00258 }
00259
00260
00261
00262 if(epoch == 0)
00263 {
00264 evaluation = objective_functional_pointer->calculate_evaluation();
00265 evaluation_improvement = 0.0;
00266 }
00267 else
00268 {
00269 evaluation = training_rate_evaluation[1];
00270 evaluation_improvement = old_evaluation - evaluation;
00271 }
00272
00273 validation_error = objective_functional_pointer->calculate_validation_error();
00274
00275 if(epoch == 0)
00276 {
00277 validation_error_increment = 0.0;
00278 }
00279 else
00280 {
00281 validation_error_increment = validation_error - old_validation_error;
00282 }
00283
00284 gradient = objective_functional_pointer->calculate_gradient();
00285
00286 gradient_norm = gradient.calculate_norm();
00287
00288 if(display && gradient_norm >= warning_gradient_norm)
00289 {
00290 std::cout << "Flood Warning: Gradient norm is " << gradient_norm << "." << std::endl;
00291 }
00292
00293
00294
00295 training_direction = calculate_training_direction(gradient);
00296
00297 if(epoch == 0)
00298 {
00299 initial_training_rate = first_training_rate;
00300 }
00301 else
00302 {
00303 initial_training_rate = old_training_rate;
00304 }
00305
00306 training_rate_evaluation
00307 = calculate_training_rate_evaluation(evaluation, training_direction, initial_training_rate);
00308
00309 training_rate = training_rate_evaluation[0];
00310
00311 parameters_increment = training_direction*training_rate;
00312 parameters_increment_norm = parameters_increment.calculate_norm();
00313
00314
00315
00316 time(¤t_time);
00317 elapsed_time = difftime(current_time, beginning_time);
00318
00319
00320
00321 if(reserve_parameters_history)
00322 {
00323 parameters_history[epoch] = parameters;
00324 }
00325
00326 if(reserve_parameters_norm_history)
00327 {
00328 parameters_norm_history[epoch] = parameters_norm;
00329 }
00330
00331
00332
00333 if(reserve_evaluation_history)
00334 {
00335 evaluation_history[epoch] = evaluation;
00336 }
00337
00338 if(reserve_validation_error_history)
00339 {
00340 validation_error_history[epoch] = validation_error;
00341 }
00342
00343 if(reserve_gradient_history)
00344 {
00345 gradient_history[epoch] = gradient;
00346 }
00347
00348 if(reserve_gradient_norm_history)
00349 {
00350 gradient_norm_history[epoch] = gradient_norm;
00351 }
00352
00353
00354
00355 if(reserve_training_direction_history)
00356 {
00357 training_direction_history[epoch] = training_direction;
00358 }
00359
00360 if(reserve_training_rate_history)
00361 {
00362 training_rate_history[epoch] = training_rate;
00363 }
00364
00365 if(reserve_elapsed_time_history)
00366 {
00367 elapsed_time_history[epoch] = elapsed_time;
00368 }
00369
00370
00371
00372 if(parameters_increment_norm <= minimum_parameters_increment_norm)
00373 {
00374 if(display)
00375 {
00376 std::cout << "Epoch " << epoch << ": Minimum parameters increment norm reached." << std::endl;
00377 std::cout << "Parameters increment norm: " << parameters_increment_norm << std::endl;
00378 }
00379
00380 stop_training = true;
00381 }
00382
00383 else if(epoch != 0 && evaluation_improvement <= minimum_evaluation_improvement)
00384 {
00385 if(display)
00386 {
00387 std::cout << "Epoch " << epoch << ": Minimum evaluation improvement reached." << std::endl
00388 << "Evaluation improvement: " << evaluation_improvement << std::endl;
00389 }
00390
00391 stop_training = true;
00392 }
00393
00394 else if(evaluation <= evaluation_goal)
00395 {
00396 if(display)
00397 {
00398 std::cout << "Epoch " << epoch << ": Evaluation goal reached." << std::endl;
00399 }
00400
00401 stop_training = true;
00402 }
00403
00404 else if(early_stopping && epoch != 0 && validation_error_increment > 0.0)
00405 {
00406 if(display)
00407 {
00408 std::cout << "Epoch " << epoch << ": Validation error stopped improving." << std::endl;
00409 std::cout << "Validation error increment: " << validation_error_increment << std::endl;
00410 }
00411
00412 stop_training = true;
00413 }
00414
00415 else if(gradient_norm <= gradient_norm_goal)
00416 {
00417 if(display)
00418 {
00419 std::cout << "Epoch " << epoch << ": Gradient norm goal reached." << std::endl;
00420 }
00421
00422 stop_training = true;
00423 }
00424
00425 else if(epoch == maximum_epochs_number)
00426 {
00427 if(display)
00428 {
00429 std::cout << "Epoch " << epoch << ": Maximum number of epochs reached." << std::endl;
00430 }
00431
00432 stop_training = true;
00433 }
00434
00435 else if(elapsed_time >= maximum_time)
00436 {
00437 if(display)
00438 {
00439 std::cout << "Epoch " << epoch << ": Maximum training time reached." << std::endl;
00440 }
00441
00442 stop_training = true;
00443 }
00444
00445 if(stop_training)
00446 {
00447 if(display)
00448 {
00449 std::cout << "Parameters norm: " << parameters_norm << std::endl;
00450
00451 std::cout << "Evaluation: " << evaluation << std::endl;
00452
00453 if(validation_error != 0)
00454 {
00455 std::cout << "Validation error: " << validation_error << std::endl;
00456 }
00457
00458 std::cout << "Gradient norm: " << gradient_norm << std::endl;
00459
00460 objective_functional_pointer->print_information();
00461
00462 std::cout << "Training rate: " << training_rate << std::endl
00463 << "Elapsed time: " << elapsed_time << std::endl;
00464 }
00465
00466 resize_training_history(1+epoch);
00467
00468 break;
00469 }
00470 else if(display && epoch % display_period == 0)
00471 {
00472 std::cout << "Epoch " << epoch << ";" << std::endl;
00473
00474 std::cout << "Parameters norm: " << parameters_norm << std::endl;
00475
00476 std::cout << "Evaluation: " << evaluation << std::endl;
00477
00478 if(validation_error != 0)
00479 {
00480 std::cout << "Validation error: " << validation_error << std::endl;
00481 }
00482
00483 std::cout << "Gradient norm: " << gradient_norm << std::endl;
00484
00485 objective_functional_pointer->print_information();
00486
00487 std::cout << "Training rate: " << training_rate << std::endl
00488 << "Elapsed time: " << elapsed_time << std::endl;
00489 }
00490
00491
00492
00493 parameters += parameters_increment;
00494
00495 multilayer_perceptron_pointer->set_parameters(parameters);
00496
00497
00498
00499 old_evaluation = evaluation;
00500 old_validation_error = validation_error;
00501
00502 old_training_rate = training_rate;
00503 }
00504
00505 }
00506
00507
00508
00509
00513
00514 std::string GradientDescent::to_XML(bool show_declaration)
00515 {
00516 std::stringstream buffer;
00517
00518 if(show_declaration)
00519 {
00520 buffer << "<Flood version='3.0' class='GradientDescent'>" << std::endl;
00521 }
00522
00523
00524
00525 buffer << "<TrainingRateMethod>" << std::endl
00526 << get_training_rate_method_name() << std::endl
00527 << "</TrainingRateMethod>" << std::endl;
00528
00529
00530
00531 buffer << "<FirstTrainingRate> " << std::endl
00532 << first_training_rate << std::endl
00533 << "</FirstTrainingRate> " << std::endl;
00534
00535 buffer << "<TrainingRateTolerance>" << std::endl
00536 << training_rate_tolerance << std::endl
00537 << "</TrainingRateTolerance>" << std::endl;
00538
00539 buffer << "<WarningParametersNorm>" << std::endl
00540 << warning_parameters_norm << std::endl
00541 << "</WarningParametersNorm>" << std::endl;
00542
00543 buffer << "<WarningGradientNorm>" << std::endl
00544 << warning_gradient_norm << std::endl
00545 << "</WarningGradientNorm>" << std::endl;
00546
00547 buffer << "<WarningTrainingRate>" << std::endl
00548 << warning_training_rate << std::endl
00549 << "</WarningTrainingRate>" << std::endl;
00550
00551 buffer << "<ErrorParametersNorm>" << std::endl
00552 << error_parameters_norm << std::endl
00553 << "</ErrorParametersNorm>" << std::endl;
00554
00555 buffer << "<ErrorGradientNorm>" << std::endl
00556 << error_gradient_norm << std::endl
00557 << "</ErrorGradientNorm>" << std::endl;
00558
00559 buffer << "<ErrorTrainingRate>" << std::endl
00560 << error_training_rate << std::endl
00561 << "</ErrorTrainingRate>" << std::endl;
00562
00563
00564
00565 buffer << "<MinimumParametersIncrementNorm>" << std::endl
00566 << minimum_parameters_increment_norm << std::endl
00567 << "</MinimumParametersIncrementNorm>" << std::endl;
00568
00569 buffer << "<MinimumEvaluationImprovement>" << std::endl
00570 << minimum_evaluation_improvement << std::endl
00571 << "</MinimumEvaluationImprovement>" << std::endl;
00572
00573 buffer << "<EvaluationGoal>" << std::endl
00574 << evaluation_goal << std::endl
00575 << "</EvaluationGoal>" << std::endl;
00576
00577 buffer << "<EarlyStopping>" << std::endl
00578 << early_stopping << std::endl
00579 << "</EarlyStopping>" << std::endl;
00580
00581 buffer << "<GradientNormGoal>" << std::endl
00582 << gradient_norm_goal << std::endl
00583 << "</GradientNormGoal>" << std::endl;
00584
00585 buffer << "<MaximumEpochsNumber>" << std::endl
00586 << maximum_epochs_number << std::endl
00587 << "</MaximumEpochsNumber>" << std::endl;
00588
00589 buffer << "<MaximumTime>" << std::endl
00590 << maximum_time << std::endl
00591 << "</MaximumTime>" << std::endl;
00592
00593
00594
00595 buffer << "<ReserveParametersHistory>" << std::endl
00596 << reserve_parameters_history << std::endl
00597 << "</ReserveParametersHistory>" << std::endl;
00598
00599 buffer << "<ReserveParametersNormHistory>" << std::endl
00600 << reserve_parameters_norm_history << std::endl
00601 << "</ReserveParametersNormHistory>" << std::endl;
00602
00603 buffer << "<ReserveEvaluationHistory>" << std::endl
00604 << reserve_evaluation_history << std::endl
00605 << "</ReserveEvaluationHistory>" << std::endl;
00606
00607 buffer << "<ReserveGradientHistory>" << std::endl
00608 << reserve_gradient_history << std::endl
00609 << "</ReserveGradientHistory>" << std::endl;
00610
00611 buffer << "<ReserveGradientNormHistory>" << std::endl
00612 << reserve_gradient_norm_history << std::endl
00613 << "</ReserveGradientNormHistory>" << std::endl;
00614
00615 buffer << "<ReserveValidationErrorHistory>" << std::endl
00616 << reserve_validation_error_history << std::endl
00617 << "</ReserveValidationErrorHistory>" << std::endl;
00618
00619 buffer << "<ReserveTrainingDirectionHistory>" << std::endl
00620 << reserve_training_direction_history << std::endl
00621 << "</ReserveTrainingDirectionHistory>" << std::endl;
00622
00623 buffer << "<ReserveTrainingRateHistory>" << std::endl
00624 << reserve_training_rate_history << std::endl
00625 << "</ReserveTrainingRateHistory>" << std::endl;
00626
00627 buffer << "<ReserveElapsedTimeHistory>" << std::endl
00628 << reserve_elapsed_time_history << std::endl
00629 << "</ReserveElapsedTimeHistory>" << std::endl;
00630
00631
00632
00633 buffer << "<Display>" << std::endl
00634 << display << std::endl
00635 << "</Display>" << std::endl;
00636
00637 buffer << "<DisplayPeriod>" << std::endl
00638 << display_period << std::endl
00639 << "</DisplayPeriod>" << std::endl;
00640
00641 return(buffer.str());
00642 }
00643
00644
00645
00646
00647 std::string GradientDescent::get_training_history_XML(bool show_declaration)
00648 {
00649 std::stringstream buffer;
00650
00651 if(show_declaration)
00652 {
00653 buffer << "<Flood version='3.0' class='GradientDescent' content='TrainingHistory'>" << std::endl;
00654 }
00655
00656 if(reserve_parameters_history)
00657 {
00658 buffer << "<ParametersHistory>" << std::endl
00659 << parameters_history << std::endl
00660 << "</ParametersHistory>" << std::endl;
00661 }
00662
00663 if(reserve_parameters_norm_history)
00664 {
00665 buffer << "<ParametersNormHistory>" << std::endl
00666 << parameters_norm_history << std::endl
00667 << "</ParametersNormHistory>" << std::endl;
00668 }
00669
00670 if(reserve_evaluation_history)
00671 {
00672 buffer << "<EvaluationHistory>" << std::endl
00673 << evaluation_history << std::endl
00674 << "</EvaluationHistory>" << std::endl;
00675 }
00676
00677 if(reserve_gradient_history)
00678 {
00679 buffer << "<GradientHistory>" << std::endl
00680 << gradient_history << std::endl
00681 << "</GradientHistory>" << std::endl;
00682 }
00683
00684 if(reserve_gradient_norm_history)
00685 {
00686 buffer << "<GradientNormHistory>" << std::endl
00687 << gradient_norm_history << std::endl
00688 << "</GradientNormHistory>" << std::endl;
00689 }
00690
00691 if(reserve_validation_error_history)
00692 {
00693 buffer << "<ValidationErrorHistory>" << std::endl
00694 << validation_error_history << std::endl
00695 << "</ValidationErrorHistory>" << std::endl;
00696 }
00697
00698 if(reserve_training_direction_history)
00699 {
00700 buffer << "<TrainingDirectionHistory>" << std::endl
00701 << training_direction_history << std::endl
00702 << "</TrainingDirectionHistory>" << std::endl;
00703 }
00704
00705 if(reserve_training_rate_history)
00706 {
00707 buffer << "<TrainingRateHistory>" << std::endl
00708 << training_rate_history << std::endl
00709 << "</TrainingRateHistory>" << std::endl;
00710 }
00711
00712 if(reserve_elapsed_time_history)
00713 {
00714 buffer << "<ElapsedTimeHistory>" << std::endl
00715 << elapsed_time_history << std::endl
00716 << "</ElapsedTimeHistory>" << std::endl;
00717 }
00718
00719 return(buffer.str());
00720 }
00721
00722
00723
00724
00729
00730 void GradientDescent::load(const char* filename)
00731 {
00732
00733
00734 std::fstream file;
00735
00736 file.open(filename, std::ios::in);
00737
00738 if(!file.is_open())
00739 {
00740 std::cerr << "Flood Error: GradientDescent class." << std::endl
00741 << "void load(const char*) method." << std::endl
00742 << "Cannot open conjugate gradient object XML-type file." << std::endl;
00743
00744 exit(1);
00745 }
00746
00747 std::string word;
00748 std::string line;
00749
00750
00751
00752 getline(file, line);
00753
00754 if(line != "<Flood version='3.0' class='GradientDescent'>")
00755 {
00756 std::cerr << "Flood Error: GradientDescent class." << std::endl
00757 << "void load(const char*) method." << std::endl
00758 << "Unknown file declaration: " << line << std::endl;
00759
00760 exit(1);
00761 }
00762
00763 while(!file.eof())
00764 {
00765 file >> word;
00766
00767
00768
00769 if(word == "<TrainingRateMethod>")
00770 {
00771 std::string new_training_rate_method;
00772
00773 file >> new_training_rate_method;
00774
00775 set_training_rate_method(new_training_rate_method);
00776
00777 file >> word;
00778
00779 if(word != "</TrainingRateMethod>")
00780 {
00781 std::cerr << "Flood Error: GradientDescent class." << std::endl
00782 << "void load(const char*) method." << std::endl
00783 << "Unknown training rate method end tag: " << word << std::endl;
00784
00785 exit(1);
00786 }
00787 }
00788
00789
00790
00791 else if(word == "<BracketingFactor>")
00792 {
00793 double new_backeting_factor;
00794
00795 file >> new_backeting_factor;
00796
00797 set_bracketing_factor(new_backeting_factor);
00798
00799 file >> word;
00800
00801 if(word != "</BracketingFactor>")
00802 {
00803 std::cerr << "Flood Error: GradientDescent class." << std::endl
00804 << "void load(const char*) method." << std::endl
00805 << "Unknown bracketing factor end tag: " << word << std::endl;
00806
00807 exit(1);
00808 }
00809 }
00810 else if(word == "<FirstTrainingRate>")
00811 {
00812 double new_first_training_rate;
00813
00814 file >> new_first_training_rate;
00815
00816 set_first_training_rate(new_first_training_rate);
00817
00818 file >> word;
00819
00820 if(word != "</FirstTrainingRate>")
00821 {
00822 std::cerr << "Flood Error: GradientDescent class." << std::endl
00823 << "void load(const char*) method." << std::endl
00824 << "Unknown first training rate end tag: " << word << std::endl;
00825
00826 exit(1);
00827 }
00828 }
00829 else if(word == "<TrainingRateTolerance>")
00830 {
00831 double new_training_rate_tolerance;
00832
00833 file >> new_training_rate_tolerance;
00834
00835 set_training_rate_tolerance(new_training_rate_tolerance);
00836
00837 file >> word;
00838
00839 if(word != "</TrainingRateTolerance>")
00840 {
00841 std::cerr << "Flood Error: GradientDescent class." << std::endl
00842 << "void load(const char*) method." << std::endl
00843 << "Unknown training rate tolerance end tag: " << word << std::endl;
00844
00845 exit(1);
00846 }
00847 }
00848 else if(word == "<WarningTrainingRate>")
00849 {
00850 double new_warning_training_rate;
00851
00852 file >> new_warning_training_rate;
00853
00854 set_warning_training_rate(new_warning_training_rate);
00855
00856 file >> word;
00857
00858 if(word != "</WarningTrainingRate>")
00859 {
00860 std::cerr << "Flood Error: GradientDescent class." << std::endl
00861 << "void load(const char*) method." << std::endl
00862 << "Unknown warning training rate end tag: " << word << std::endl;
00863
00864 exit(1);
00865 }
00866 }
00867 else if(word == "<ErrorTrainingRate>")
00868 {
00869 double new_error_training_rate;
00870
00871 file >> new_error_training_rate;
00872
00873 set_error_training_rate(new_error_training_rate);
00874
00875 file >> word;
00876
00877 if(word != "</ErrorTrainingRate>")
00878 {
00879 std::cerr << "Flood Error: GradientDescent class." << std::endl
00880 << "void load(const char*) method." << std::endl
00881 << "Unknown error training rate end tag: " << word << std::endl;
00882
00883 exit(1);
00884 }
00885 }
00886
00887
00888
00889 else if(word == "<MinimumParametersIncrementNorm>")
00890 {
00891 double new_minimum_parameters_increment_norm;
00892
00893 file >> new_minimum_parameters_increment_norm;
00894
00895 set_minimum_parameters_increment_norm(new_minimum_parameters_increment_norm);
00896
00897 file >> word;
00898
00899 if(word != "</MinimumParametersIncrementNorm>")
00900 {
00901 std::cerr << "Flood Error: GradientDescent class." << std::endl
00902 << "void load(const char*) method." << std::endl
00903 << "Unknown minimum parameters increment norm end tag: " << word << std::endl;
00904
00905 exit(1);
00906 }
00907 }
00908 else if(word == "<MinimumEvaluationImprovement>")
00909 {
00910 double new_minimum_evaluation_improvement;
00911
00912 file >> new_minimum_evaluation_improvement;
00913
00914 set_minimum_evaluation_improvement(new_minimum_evaluation_improvement);
00915
00916 file >> word;
00917
00918 if(word != "</MinimumEvaluationImprovement>")
00919 {
00920 std::cerr << "Flood Error: GradientDescent class." << std::endl
00921 << "void load(const char*) method." << std::endl
00922 << "Unknown minimum evaluation improvement end tag: " << word << std::endl;
00923
00924 exit(1);
00925 }
00926 }
00927 else if(word == "<EvaluationGoal>")
00928 {
00929 double new_evaluation_goal;
00930
00931 file >> new_evaluation_goal;
00932
00933 set_evaluation_goal(new_evaluation_goal);
00934
00935 file >> word;
00936
00937 if(word != "</EvaluationGoal>")
00938 {
00939 std::cerr << "Flood Error: GradientDescent class." << std::endl
00940 << "void load(const char*) method." << std::endl
00941 << "Unknown evaluation goal end tag: " << word << std::endl;
00942
00943 exit(1);
00944 }
00945 }
00946 else if(word == "<GradientNormGoal>")
00947 {
00948 double new_gradient_norm_goal;
00949
00950 file >> new_gradient_norm_goal;
00951
00952 set_gradient_norm_goal(new_gradient_norm_goal);
00953
00954 file >> word;
00955
00956 if(word != "</GradientNormGoal>")
00957 {
00958 std::cerr << "Flood Error: GradientDescent class." << std::endl
00959 << "void load(const char*) method." << std::endl
00960 << "Unknown gradient norm goal end tag: " << word << std::endl;
00961
00962 exit(1);
00963 }
00964 }
00965 else if(word == "<MaximumEpochsNumber>")
00966 {
00967 int new_maximum_epochs_number;
00968
00969 file >> new_maximum_epochs_number;
00970
00971 set_maximum_epochs_number(new_maximum_epochs_number);
00972
00973 file >> word;
00974
00975 if(word != "</MaximumEpochsNumber>")
00976 {
00977 std::cerr << "Flood Error: GradientDescent class." << std::endl
00978 << "void load(const char*) method." << std::endl
00979 << "Unknown maximum epochs number end tag: " << word << std::endl;
00980
00981 exit(1);
00982 }
00983 }
00984 else if(word == "<MaximumTime>")
00985 {
00986 double new_maximum_time;
00987
00988 file >> new_maximum_time;
00989
00990 set_maximum_time(new_maximum_time);
00991
00992 file >> word;
00993
00994 if(word != "</MaximumTime>")
00995 {
00996 std::cerr << "Flood Error: GradientDescent class." << std::endl
00997 << "void load(const char*) method." << std::endl
00998 << "Unknown maximum time end tag: " << word << std::endl;
00999
01000 exit(1);
01001 }
01002 }
01003 else if(word == "<EarlyStopping>")
01004 {
01005 bool new_early_stopping;
01006
01007 file >> new_early_stopping;
01008
01009 set_early_stopping(new_early_stopping);
01010
01011 file >> word;
01012
01013 if(word != "</EarlyStopping>")
01014 {
01015 std::cerr << "Flood Error: GradientDescent class." << std::endl
01016 << "void load(const char*) method." << std::endl
01017 << "Unknown early stopping end tag: " << word << std::endl;
01018
01019 exit(1);
01020 }
01021 }
01022
01023
01024
01025 else if(word == "<ReserveParametersHistory>")
01026 {
01027 bool new_reserve_parameters_history;
01028
01029 file >> new_reserve_parameters_history;
01030
01031 set_reserve_parameters_history(new_reserve_parameters_history);
01032
01033 file >> word;
01034
01035 if(word != "</ReserveParametersHistory>")
01036 {
01037 std::cerr << "Flood Error: GradientDescent class." << std::endl
01038 << "void load(const char*) method." << std::endl
01039 << "Unknown reserve parameters history end tag: " << word << std::endl;
01040
01041 exit(1);
01042 }
01043 }
01044 else if(word == "<ReserveParametersNormHistory>")
01045 {
01046 bool new_reserve_parameters_norm_history;
01047
01048 file >> new_reserve_parameters_norm_history;
01049
01050 set_reserve_parameters_norm_history(new_reserve_parameters_norm_history);
01051
01052 file >> word;
01053
01054 if(word != "</ReserveParametersNormHistory>")
01055 {
01056 std::cerr << "Flood Error: GradientDescent class." << std::endl
01057 << "void load(const char*) method." << std::endl
01058 << "Unknown reserve parameters norm history end tag: " << word << std::endl;
01059
01060 exit(1);
01061 }
01062 }
01063 else if(word == "<ReserveEvaluationHistory>")
01064 {
01065 bool new_reserve_evaluation_history;
01066
01067 file >> new_reserve_evaluation_history;
01068
01069 set_reserve_evaluation_history(new_reserve_evaluation_history);
01070
01071 file >> word;
01072
01073 if(word != "</ReserveEvaluationHistory>")
01074 {
01075 std::cerr << "Flood Error: GradientDescent class." << std::endl
01076 << "void load(const char*) method." << std::endl
01077 << "Unknown reserve evaluation history end tag: " << word << std::endl;
01078
01079 exit(1);
01080 }
01081 }
01082 else if(word == "<ReserveGradientHistory>")
01083 {
01084 bool new_reserve_gradient_history;
01085
01086 file >> new_reserve_gradient_history;
01087
01088 set_reserve_gradient_history(new_reserve_gradient_history);
01089
01090 file >> word;
01091
01092 if(word != "</ReserveGradientHistory>")
01093 {
01094 std::cerr << "Flood Error: GradientDescent class." << std::endl
01095 << "void load(const char*) method." << std::endl
01096 << "Unknown reserve gradient history end tag: " << word << std::endl;
01097
01098 exit(1);
01099 }
01100 }
01101 else if(word == "<ReserveGradientNormHistory>")
01102 {
01103 bool new_reserve_gradient_norm_history;
01104
01105 file >> new_reserve_gradient_norm_history;
01106
01107 set_reserve_gradient_norm_history(new_reserve_gradient_norm_history);
01108
01109 file >> word;
01110
01111 if(word != "</ReserveGradientNormHistory>")
01112 {
01113 std::cerr << "Flood Error: GradientDescent class." << std::endl
01114 << "void load(const char*) method." << std::endl
01115 << "Unknown reserve gradient norm history end tag: " << word << std::endl;
01116
01117 exit(1);
01118 }
01119 }
01120 else if(word == "<ReserveTrainingDirectionHistory>")
01121 {
01122 bool new_reserve_training_direction_history;
01123
01124 file >> new_reserve_training_direction_history;
01125
01126 set_reserve_training_direction_history(new_reserve_training_direction_history);
01127
01128 file >> word;
01129
01130 if(word != "</ReserveTrainingDirectionHistory>")
01131 {
01132 std::cerr << "Flood Error: GradientDescent class." << std::endl
01133 << "void load(const char*) method." << std::endl
01134 << "Unknown reserve training direction history end tag: " << word << std::endl;
01135
01136 exit(1);
01137 }
01138 }
01139
01140 else if(word == "<ReserveTrainingRateHistory>")
01141 {
01142 bool new_reserve_training_rate_history;
01143
01144 file >> new_reserve_training_rate_history;
01145
01146 set_reserve_training_rate_history(new_reserve_training_rate_history);
01147
01148 file >> word;
01149
01150 if(word != "</ReserveTrainingRateHistory>")
01151 {
01152 std::cerr << "Flood Error: GradientDescent class." << std::endl
01153 << "void load(const char*) method." << std::endl
01154 << "Unknown reserve training rate history end tag: " << word << std::endl;
01155
01156 exit(1);
01157 }
01158 }
01159
01160 else if(word == "<ReserveElapsedTimeHistory>")
01161 {
01162 bool new_reserve_elapsed_time_history;
01163
01164 file >> new_reserve_elapsed_time_history;
01165
01166 set_reserve_elapsed_time_history(new_reserve_elapsed_time_history);
01167
01168 file >> word;
01169
01170 if(word != "</ReserveElapsedTimeHistory>")
01171 {
01172 std::cerr << "Flood Error: GradientDescent class." << std::endl
01173 << "void load(const char*) method." << std::endl
01174 << "Unknown reserve elapsed time history end tag: " << word << std::endl;
01175
01176 exit(1);
01177 }
01178 }
01179
01180 else if(word == "<ReserveValidationErrorHistory>")
01181 {
01182 bool new_reserve_validation_error_history;
01183
01184 file >> new_reserve_validation_error_history;
01185
01186 set_reserve_validation_error_history(new_reserve_validation_error_history);
01187
01188 file >> word;
01189
01190 if(word != "</ReserveValidationErrorHistory>")
01191 {
01192 std::cerr << "Flood Error: GradientDescent class." << std::endl
01193 << "void load(const char*) method." << std::endl
01194 << "Unknown reserve validation error history end tag: " << word << std::endl;
01195
01196 exit(1);
01197 }
01198 }
01199
01200
01201
01202 else if(word == "<Display>")
01203 {
01204 bool new_display;
01205
01206 file >> new_display;
01207
01208 set_display(new_display);
01209
01210 file >> word;
01211
01212 if(word != "</Display>")
01213 {
01214 std::cerr << "Flood Error: GradientDescent class." << std::endl
01215 << "void load(const char*) method." << std::endl
01216 << "Unknown display end tag: " << word << std::endl;
01217
01218 exit(1);
01219 }
01220 }
01221 else if(word == "<DisplayPeriod>")
01222 {
01223 int new_display_period;
01224
01225 file >> new_display_period;
01226
01227 set_display_period(new_display_period);
01228
01229 file >> word;
01230
01231 if(word != "</DisplayPeriod>")
01232 {
01233 std::cerr << "Flood Error: GradientDescent class." << std::endl
01234 << "void load(const char*) method." << std::endl
01235 << "Unknown display period end tag: " << word << std::endl;
01236
01237 exit(1);
01238 }
01239 }
01240 else
01241 {
01242
01243 }
01244 }
01245
01246
01247
01248 file.close();
01249 }
01250
01251
01252
01253
01269
01270 void GradientDescent::resize_training_history(int new_size)
01271 {
01272
01273
01274 if(reserve_parameters_history)
01275 {
01276 parameters_history.resize(new_size);
01277 }
01278
01279
01280
01281 if(reserve_parameters_norm_history)
01282 {
01283 parameters_norm_history.resize(new_size);
01284 }
01285
01286
01287
01288 if(reserve_evaluation_history)
01289 {
01290 evaluation_history.resize(new_size);
01291 }
01292
01293
01294
01295 if(reserve_validation_error_history)
01296 {
01297 validation_error_history.resize(new_size);
01298 }
01299
01300
01301
01302 if(reserve_gradient_history)
01303 {
01304 gradient_history.resize(new_size);
01305 }
01306
01307
01308
01309 if(reserve_gradient_norm_history)
01310 {
01311 gradient_norm_history.resize(new_size);
01312 }
01313
01314
01315
01316 if(reserve_training_direction_history)
01317 {
01318 training_direction_history.resize(new_size);
01319 }
01320
01321
01322
01323 if(reserve_training_rate_history)
01324 {
01325 training_rate_history.resize(new_size);
01326 }
01327
01328
01329
01330 if(reserve_elapsed_time_history)
01331 {
01332 elapsed_time_history.resize(new_size);
01333 }
01334 }
01335
01336 }
01337
01338
01339
01340
01341
01342
01343
01344
01345
01346
01347
01348
01349
01350
01351
01352
01353