00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
#include <iostream>
#include <fstream>
#include <sstream>
#include <string>
#include <cmath>
#include <cstdlib>
#include <ctime>

#include "Perceptron.h"
#include "../Utilities/Vector.h"
00023
00024 namespace Flood
00025 {
00026
00034
00035 Perceptron::Perceptron(void)
00036 {
00037 set();
00038 }
00039
00040
00050
00051 Perceptron::Perceptron(int new_inputs_number)
00052 {
00053 set(new_inputs_number);
00054 }
00055
00056
00061
00062 Perceptron::Perceptron(int new_inputs_number, double new_parameters_value)
00063 {
00064 set(new_inputs_number, new_parameters_value);
00065 }
00066
00067
00071
00072 Perceptron::Perceptron(const char* filename)
00073 {
00074 load(filename);
00075 }
00076
00077
00080
00081 Perceptron::Perceptron(const Perceptron& other_perceptron)
00082 {
00083 activation_function = other_perceptron.activation_function;
00084
00085 inputs_number = other_perceptron.inputs_number;
00086
00087 bias = other_perceptron.bias;
00088
00089 synaptic_weights = other_perceptron.synaptic_weights;
00090
00091 display = other_perceptron.display;
00092 }
00093
00094
00096
/// Destructor. The perceptron owns no dynamically allocated resources
/// (synaptic_weights is a value-type Vector), so nothing needs to be
/// released explicitly.

Perceptron::~Perceptron(void)
{
}
00100
00101
00102
00103
00106
00107 Perceptron& Perceptron::operator=(const Perceptron& other_perceptron)
00108 {
00109 if(this != &other_perceptron)
00110 {
00111 activation_function = other_perceptron.activation_function;
00112
00113 inputs_number = other_perceptron.inputs_number;
00114
00115 bias = other_perceptron.bias;
00116
00117 synaptic_weights = other_perceptron.synaptic_weights;
00118
00119 display = other_perceptron.display;
00120 }
00121
00122 return(*this);
00123 }
00124
00125
00126
00127
00128
00129
00130
00132
00133 Perceptron::ActivationFunction& Perceptron::get_activation_function(void)
00134 {
00135 return(activation_function);
00136 }
00137
00138
00139
00140
00142
00143 std::string Perceptron::get_activation_function_name(void)
00144 {
00145 switch(activation_function)
00146 {
00147 case Perceptron::Logistic:
00148 {
00149 return("Logistic");
00150 }
00151 break;
00152
00153 case Perceptron::HyperbolicTangent:
00154 {
00155 return("HyperbolicTangent");
00156 }
00157 break;
00158
00159 case Perceptron::Threshold:
00160 {
00161 return("Threshold");
00162 }
00163 break;
00164
00165 case Perceptron::SymmetricThreshold:
00166 {
00167 return("SymmetricThreshold");
00168 }
00169 break;
00170
00171 case Perceptron::Linear:
00172 {
00173 return("Linear");
00174 }
00175 break;
00176
00177 default:
00178 {
00179 std::cerr << "Flood Error: Perceptron class." << std::endl
00180 << "std::string get_activation_function(void) method." << std::endl
00181 << "Unknown activation function." << std::endl;
00182
00183 exit(1);
00184 }
00185 break;
00186 }
00187 }
00188
00189
00190
00191
00193
00194 double Perceptron::get_bias(void)
00195 {
00196 return(bias);
00197 }
00198
00199
00200
00201
00203
00204 Vector<double>& Perceptron::get_synaptic_weights(void)
00205 {
00206 return(synaptic_weights);
00207 }
00208
00209
00210
00211
00214
00215 double Perceptron::get_synaptic_weight(int synaptic_weight_index)
00216 {
00217
00218
00219 #ifdef _DEBUG
00220
00221 if(synaptic_weight_index < 0 || synaptic_weight_index >= inputs_number)
00222 {
00223 std::cerr << "Flood Error: Perceptron class." << std::endl
00224 << "double get_synaptic_weight(int) method." << std::endl
00225 << "Index of synaptic weight must be equal or greater than zero and less than number of inputs."
00226 << std::endl;
00227
00228 exit(1);
00229 }
00230
00231 #endif
00232
00233
00234
00235 return(synaptic_weights[synaptic_weight_index]);
00236 }
00237
00238
00239
00240
00243
00244 bool Perceptron::get_display(void)
00245 {
00246 return(display);
00247 }
00248
00249
00250
00251
00253
00254 void Perceptron::set(void)
00255 {
00256 activation_function = HyperbolicTangent;
00257
00258 inputs_number = 0;
00259
00260 initialize_bias_normal(0.0, 0.2);
00261
00262 display = true;
00263 }
00264
00265
00266
00267
00271
00272 void Perceptron::set(int new_inputs_number)
00273 {
00274
00275
00276 activation_function = HyperbolicTangent;
00277
00278 inputs_number = new_inputs_number;
00279
00280 synaptic_weights.set(inputs_number);
00281
00282
00283
00284 initialize_bias_normal(0.0, 0.2);
00285 initialize_synaptic_weights_normal(0.0, 0.2);
00286
00287 display = true;
00288 }
00289
00290
00291
00292
00297
00298 void Perceptron::set(int new_inputs_number, double new_parameters_value)
00299 {
00300 inputs_number = new_inputs_number;
00301
00302 activation_function = HyperbolicTangent;
00303
00304 bias = new_parameters_value;
00305 synaptic_weights.set(inputs_number, new_parameters_value);
00306
00307 display = true;
00308 }
00309
00310
00311
00312
00315
00316 void Perceptron::set(const Perceptron& other_perceptron)
00317 {
00318 activation_function = other_perceptron.activation_function;
00319
00320 inputs_number = other_perceptron.inputs_number;
00321
00322 bias = other_perceptron.bias;
00323
00324 synaptic_weights = other_perceptron.synaptic_weights;
00325
00326 display = other_perceptron.display;
00327 }
00328
00329
00330
00331
00334
00335 void Perceptron::set_activation_function(const Perceptron::ActivationFunction& new_activation_function)
00336 {
00337 activation_function = new_activation_function;
00338 }
00339
00340
00341
00342
00346
00347 void Perceptron::set_activation_function(const std::string& new_activation_function_name)
00348 {
00349 if(new_activation_function_name == "Logistic")
00350 {
00351 activation_function = Logistic;
00352 }
00353 else if(new_activation_function_name == "HyperbolicTangent")
00354 {
00355 activation_function = HyperbolicTangent;
00356 }
00357 else if(new_activation_function_name == "Threshold")
00358 {
00359 activation_function = Threshold;
00360 }
00361 else if(new_activation_function_name == "SymmetricThreshold")
00362 {
00363 activation_function = SymmetricThreshold;
00364 }
00365 else
00366 {
00367 std::cerr << "Flood Error: Perceptron class." << std::endl
00368 << "void set_activation_function(const std::string&) method." << std::endl
00369 << "Unknown activation function." << std::endl;
00370
00371 exit(1);
00372 }
00373 }
00374
00375
00376
00377
00380
00381 void Perceptron::set_bias(double new_bias)
00382 {
00383 bias = new_bias;
00384 }
00385
00386
00387
00388
00391
00392 void Perceptron::set_synaptic_weights(const Vector<double>& new_synaptic_weights)
00393 {
00394
00395
00396 #ifdef _DEBUG
00397
00398 if(new_synaptic_weights.get_size() != inputs_number)
00399 {
00400 std::cerr << "Flood Error: Perceptron class." << std::endl
00401 << "void set_synaptic_weights(const Vector<double>&) method." << std::endl
00402 << "Size of synaptic weights vector must be equal to number of inputs." << std::endl;
00403
00404 exit(1);
00405 }
00406
00407 #endif
00408
00409
00410
00411 synaptic_weights = new_synaptic_weights;
00412 }
00413
00414
00415
00416
00420
00421 void Perceptron::set_synaptic_weight(int synaptic_weight_index, double new_synaptic_weight)
00422 {
00423
00424
00425 #ifdef _DEBUG
00426
00427 if(synaptic_weight_index < 0 || synaptic_weight_index >= inputs_number)
00428 {
00429 std::cerr << "Flood Error: Perceptron class." << std::endl
00430 << "void set_synaptic_weight(int, double) method." << std::endl
00431 << "Index of synaptic weight must be equal or greater than zero and less than number of inputs."
00432 << std::endl;
00433
00434 exit(1);
00435 }
00436
00437 #endif
00438
00439
00440
00441 synaptic_weights[synaptic_weight_index] = new_synaptic_weight;
00442 }
00443
00444
00445
00446
00451
00452 void Perceptron::set_display(bool new_display)
00453 {
00454 display = new_display;
00455 }
00456
00457
00458
00459
00464
00465 void Perceptron::set_inputs_number(int new_inputs_number)
00466 {
00467
00468
00469 #ifdef _DEBUG
00470
00471 if(new_inputs_number < 0)
00472 {
00473 std::cerr << "Flood Error: Perceptron class." << std::endl
00474 << "void set_inputs_number(int) method." << std::endl
00475 << "Number of inputs (" << new_inputs_number << ") must be equal or greater than zero: "
00476 << std::endl;
00477
00478 exit(1);
00479 }
00480
00481 #endif
00482
00483 inputs_number = new_inputs_number;
00484
00485 synaptic_weights.set_size(inputs_number);
00486
00487
00488
00489 initialize_bias_normal(0.0,1.0);
00490 initialize_synaptic_weights_normal(0.0,1.0);
00491 }
00492
00493
00494
00495
00497
00498 int Perceptron::get_parameters_number(void)
00499 {
00500 int parameters_number = 1 + inputs_number;
00501
00502 return(parameters_number);
00503 }
00504
00505
00506
00507
00509
00510 Vector<double> Perceptron::get_parameters(void)
00511 {
00512 int parameters_number = get_parameters_number();
00513
00514 Vector<double> parameters(parameters_number);
00515
00516 parameters[0] = bias;
00517
00518 for(int i = 0; i < inputs_number; i++)
00519 {
00520 parameters[1+i] = synaptic_weights[i];
00521 }
00522
00523 return(parameters);
00524 }
00525
00526
00527
00528
00531
00532 void Perceptron::set_parameters(const Vector<double>& new_parameters)
00533 {
00534
00535
00536 #ifdef _DEBUG
00537
00538 int size = new_parameters.get_size();
00539
00540 if(size != 1+inputs_number)
00541 {
00542 std::cerr << "Flood Error: Perceptron class." << std::endl
00543 << "void set_parameters(const Vector<double>&) method." << std::endl
00544 << "Size must be equal to one plus number of inputs." << std::endl;
00545
00546 exit(1);
00547 }
00548
00549 #endif
00550
00551 bias = new_parameters[0];
00552
00553 for(int i = 0; i < inputs_number; i++)
00554 {
00555 synaptic_weights[i] = new_parameters[i+1];
00556 }
00557 }
00558
00559
00560
00561
00564
00565 void Perceptron::initialize_bias(double value)
00566 {
00567 bias = value;
00568 }
00569
00570
00571
00572
00576
00577 void Perceptron::initialize_bias_uniform(double minimum, double maximum)
00578 {
00579
00580
00581 #ifdef _DEBUG
00582
00583 if(minimum > maximum)
00584 {
00585 std::cerr << "Flood Error: Perceptron class." << std::endl
00586 << "initialize_bias_uniform(double, double) method." << std::endl
00587 << "Minimum value must be less than maximum value." << std::endl;
00588
00589 exit(1);
00590 }
00591
00592 #endif
00593
00594 bias = calculate_random_uniform(minimum, maximum);
00595 }
00596
00597
00598
00599
00600 void Perceptron::initialize_synaptic_weights(double value)
00601 {
00602 synaptic_weights.initialize(value);
00603 }
00604
00605
00606
00607
00611
00612 void Perceptron::initialize_synaptic_weights_uniform(double minimum, double maximum)
00613 {
00614 synaptic_weights.initialize_uniform(minimum, maximum);
00615 }
00616
00617
00618
00619
00623
00624 void Perceptron::initialize_bias_normal(double mean, double standard_deviation)
00625 {
00626
00627
00628 #ifdef _DEBUG
00629
00630 if(standard_deviation < 0.0)
00631 {
00632 std::cerr << "Flood Error: Perceptron class." << std::endl
00633 << "initialize_bias_normal(double, double) method." << std::endl
00634 << "Standard deviation must be equal or greater than zero." << std::endl;
00635
00636 exit(1);
00637 }
00638
00639 #endif
00640
00641 bias = calculate_random_normal(mean, standard_deviation);
00642 }
00643
00644
00645
00646
00650
00651 void Perceptron::initialize_synaptic_weights_normal(double mean, double standard_deviation)
00652 {
00653 synaptic_weights.initialize_normal(mean, standard_deviation);
00654 }
00655
00656
00657
00658
00661
00662 void Perceptron::initialize_parameters(double value)
00663 {
00664 bias = value;
00665 synaptic_weights.initialize(value);
00666 }
00667
00668
00669
00670
00674
00675 double Perceptron::calculate_combination(const Vector<double>& input)
00676 {
00677
00678
00679 #ifdef _DEBUG
00680
00681 if(inputs_number == 0)
00682 {
00683 std::cerr << "Flood Error: Perceptron class." << std::endl
00684 << "calculate_combination(const Vector<double>&) method." << std::endl
00685 << "Number of inputs in the neuron must be greater than zero." << std::endl;
00686
00687 exit(1);
00688 }
00689 else if(input.get_size() != inputs_number)
00690 {
00691 std::cerr << "Flood Error: Perceptron class." << std::endl
00692 << "double calculate_combination(const Vector<double>&) method." << std::endl
00693 << "Size of input must be equal to number of inputs." << std::endl;
00694
00695 exit(1);
00696 }
00697
00698 #endif
00699
00700
00701
00702 return(bias + synaptic_weights.dot(input));
00703 }
00704
00705
00706
00707
00711
00712 double Perceptron::calculate_activation(double combination)
00713 {
00714 switch(activation_function)
00715 {
00716 case Perceptron::Logistic:
00717 {
00718 return(1.0/(1.0 + exp(-combination)));
00719 }
00720 break;
00721
00722 case Perceptron::HyperbolicTangent:
00723 {
00724 return(tanh(combination));
00725 }
00726 break;
00727
00728 case Perceptron::Threshold:
00729 {
00730 if(combination < 0)
00731 {
00732 return(0.0);
00733 }
00734 else
00735 {
00736 return(1.0);
00737 }
00738 }
00739 break;
00740
00741 case Perceptron::SymmetricThreshold:
00742 {
00743 if(combination < 0)
00744 {
00745 return(-1.0);
00746 }
00747 else
00748 {
00749 return(1.0);
00750 }
00751 }
00752 break;
00753
00754 case Perceptron::Linear:
00755 {
00756 return(combination);
00757 }
00758 break;
00759
00760 default:
00761 {
00762 std::cerr << "Flood Error: Perceptron class." << std::endl
00763 << "calculate_activation(double) method." << std::endl
00764 << "Unknown activation function." << std::endl;
00765
00766 exit(1);
00767 }
00768 break;
00769 }
00770 }
00771
00772
00773
00774
00778
00779 double Perceptron::calculate_activation_derivative(double combination)
00780 {
00781 switch(activation_function)
00782 {
00783 case Perceptron::Logistic:
00784 {
00785 return(exp(combination)/pow(1.0 + exp(combination), 2));
00786 }
00787 break;
00788
00789 case Perceptron::HyperbolicTangent:
00790 {
00791 return(1.0 - pow(tanh(combination), 2));
00792 }
00793 break;
00794
00795 case Perceptron::Threshold:
00796 {
00797 std::cerr << "Flood Error: Perceptron class." << std::endl
00798 << "calculate_activation_derivative(double) method." << std::endl
00799 << "Threshold activation function is not derivable." << std::endl;
00800
00801 exit(1);
00802 }
00803 break;
00804
00805 case Perceptron::SymmetricThreshold:
00806 {
00807 std::cerr << "Flood Error: Perceptron class." << std::endl
00808 << "calculate_activation_derivative(double) method." << std::endl
00809 << "Symmetric threshold activation function is not derivable." << std::endl;
00810
00811 exit(1);
00812 }
00813 break;
00814
00815 case Perceptron::Linear:
00816 {
00817 return(1.0);
00818 }
00819 break;
00820
00821 default:
00822 {
00823 std::cerr << "Flood Error: Perceptron class." << std::endl
00824 << "calculate_activation_derivative(double) method." << std::endl
00825 << "Unknown activation function." << std::endl;
00826
00827 exit(1);
00828 }
00829 break;
00830 }
00831 }
00832
00833
00834
00835
00839
00840 double Perceptron::calculate_activation_second_derivative(double combination)
00841 {
00842 switch(activation_function)
00843 {
00844 case Perceptron::Logistic:
00845 {
00846 return(-exp(combination)*(exp(combination) - 1.0)/pow(exp(combination + 1), 3));
00847 }
00848 break;
00849
00850 case Perceptron::HyperbolicTangent:
00851 {
00852 return(-2.0*tanh(combination)*(1.0 - pow(tanh(combination),2)));
00853 }
00854 break;
00855
00856 case Perceptron::Threshold:
00857 {
00858 std::cerr << "Flood Error: Perceptron class." << std::endl
00859 << "calculate_activation_second_derivative(double) method." << std::endl
00860 << "Threshold activation function is not derivable." << std::endl;
00861
00862 exit(1);
00863 }
00864 break;
00865
00866 case Perceptron::SymmetricThreshold:
00867 {
00868 std::cerr << "Flood Error: Perceptron class." << std::endl
00869 << "calculate_activation_second_derivative(double) method." << std::endl
00870 << "Symmetric threshold activation function is not derivable." << std::endl;
00871
00872 exit(1);
00873 }
00874 break;
00875
00876 case Perceptron::Linear:
00877 {
00878 return(0.0);
00879 }
00880 break;
00881
00882 default:
00883 {
00884 std::cerr << "Flood Error: Perceptron class." << std::endl
00885 << "calculate_activation_second_derivative(double) method." << std::endl
00886 << "Unknown activation function." << std::endl;
00887
00888 exit(1);
00889 }
00890 break;
00891 }
00892 }
00893
00894
00895
00896
00900
00901 double Perceptron::calculate_output(const Vector<double>& input)
00902 {
00903
00904
00905 #ifdef _DEBUG
00906
00907 int size = input.get_size();
00908
00909 if(size != inputs_number)
00910 {
00911 std::cerr << "Flood Error: Perceptron class." << std::endl
00912 << "double calculate_output(const Vector<double>&) method." << std::endl
00913 << "Size must be equal to number of inputs." << std::endl;
00914
00915 exit(1);
00916 }
00917
00918 #endif
00919
00920
00921
00922 return(calculate_activation(calculate_combination(input)));
00923 }
00924
00925
00926
00927
00930
00931 std::string Perceptron::to_XML(bool show_declaration)
00932 {
00933 std::stringstream buffer;
00934
00935
00936
00937 if(show_declaration)
00938 {
00939 buffer << "<Flood version='3.0' class='Perceptron'>" << std::endl;
00940 }
00941
00942
00943
00944 buffer << "<InputsNumber>" << std::endl
00945 << inputs_number << std::endl
00946 << "</InputsNumber>" << std::endl;
00947
00948
00949
00950 buffer << "<ActivationFunction>" << std::endl
00951 << get_activation_function_name() << std::endl
00952 << "</ActivationFunction>" << std::endl;
00953
00954
00955
00956 buffer << "<Bias>" << std::endl
00957 << bias << std::endl
00958 << "</Bias>" << std::endl;
00959
00960
00961
00962 buffer << "<SynapticWeights>" << std::endl
00963 << synaptic_weights << std::endl
00964 << "</SynapticWeights>" << std::endl;
00965
00966
00967
00968 buffer << "<Display>" << std::endl
00969 << display << std::endl
00970 << "</Display>" << std::endl;
00971
00972 return(buffer.str());
00973 }
00974
00975
00976
00977
00979
00980 void Perceptron::print(void)
00981 {
00982 if(display)
00983 {
00984 std::cout << to_XML(true);
00985 }
00986 }
00987
00988
00989
00990
00993
00994 void Perceptron::save(const char* filename)
00995 {
00996 std::fstream file;
00997
00998
00999
01000 file.open(filename, std::ios::out);
01001
01002 if(!file.is_open())
01003 {
01004 std::cerr << "Flood Error: Perceptron class." << std::endl
01005 << "void save(const char*) method." << std::endl
01006 << "Cannot open perceptron XML-type file." << std::endl;
01007
01008 exit(1);
01009 }
01010
01011
01012
01013 file << to_XML(true);
01014
01015
01016
01017 file.close();
01018 }
01019
01020
01021
01022
01025
01026 void Perceptron::load(const char* filename)
01027 {
01028 std::fstream file;
01029
01030 file.open(filename, std::ios::in);
01031
01032 if(!file.is_open())
01033 {
01034 std::cerr << "Flood Error: Perceptron class." << std::endl
01035 << "void load(const char*) method." << std::endl
01036 << "Cannot open perceptron XML-type file." << std::endl;
01037
01038 exit(1);
01039 }
01040
01041 std::string line;
01042 std::string word;
01043
01044
01045
01046 getline(file, line);
01047
01048 if(line != "<Flood version='3.0' class='Perceptron'>")
01049 {
01050
01051
01052
01053
01054
01055 }
01056
01057
01058
01059 file >> word;
01060
01061 if(word != "<InputsNumber>")
01062 {
01063 std::cerr << "Flood Error: Perceptron class." << std::endl
01064 << "void load(const char*) method." << std::endl
01065 << "Unknown inputs number begin tag: " << line << std::endl;
01066
01067 exit(1);
01068 }
01069
01070 int new_inputs_number;
01071 file >> new_inputs_number;
01072 set_inputs_number(new_inputs_number);
01073
01074 file >> word;
01075
01076 if(word != "</InputsNumber>")
01077 {
01078 std::cerr << "Flood Error: Perceptron class." << std::endl
01079 << "void load(const char*) method." << std::endl
01080 << "Unknown inputs number end tag: " << line << std::endl;
01081
01082 exit(1);
01083 }
01084
01085
01086
01087 file >> word;
01088
01089 if(word != "<ActivationFunction>")
01090 {
01091 std::cerr << "Flood Error: Perceptron class." << std::endl
01092 << "void load(const char*) method." << std::endl
01093 << "Unknown activation function begin tag: " << line << std::endl;
01094
01095 exit(1);
01096 }
01097
01098 std::string new_activation_function_name;
01099 file >> new_activation_function_name;
01100 set_activation_function(new_activation_function_name);
01101
01102 file >> word;
01103
01104 if(word != "</ActivationFunction>")
01105 {
01106 std::cerr << "Flood Error: Perceptron class." << std::endl
01107 << "void load(const char*) method." << std::endl
01108 << "Unknown activation function end tag: " << line << std::endl;
01109
01110 exit(1);
01111 }
01112
01113
01114
01115 file >> word;
01116
01117 if(word != "<Bias>")
01118 {
01119 std::cerr << "Flood Error: Perceptron class." << std::endl
01120 << "void load(const char*) method." << std::endl
01121 << "Unknown bias begin tag: " << line << std::endl;
01122
01123 exit(1);
01124 }
01125
01126 double new_bias;
01127 file >> new_bias;
01128 set_bias(new_bias);
01129
01130 file >> word;
01131
01132 if(word != "</Bias>")
01133 {
01134 std::cerr << "Flood Error: Perceptron class." << std::endl
01135 << "void load(const char*) method." << std::endl
01136 << "Unknown bias end tag: " << line << std::endl;
01137
01138 exit(1);
01139 }
01140
01141
01142
01143 file >> word;
01144
01145 if(word != "<SynapticWeights>")
01146 {
01147 std::cerr << "Flood Error: Perceptron class." << std::endl
01148 << "void load(const char*) method." << std::endl
01149 << "Unknown synaptic weights begin tag: " << line << std::endl;
01150
01151 exit(1);
01152 }
01153
01154 Vector<double> new_synaptic_weights(inputs_number);
01155 file >> new_synaptic_weights;
01156 set_synaptic_weights(new_synaptic_weights);
01157
01158 file >> word;
01159
01160 if(word != "</SynapticWeights>")
01161 {
01162 std::cerr << "Flood Error: Perceptron class." << std::endl
01163 << "void load(const char*) method." << std::endl
01164 << "Unknown synaptic weights end tag: " << line << std::endl;
01165
01166 exit(1);
01167 }
01168
01169
01170
01171 file >> word;
01172
01173 if(word != "<Display>")
01174 {
01175 std::cerr << "Flood Error: Perceptron class." << std::endl
01176 << "void load(const char*) method." << std::endl
01177 << "Unknown display begin tag: " << line << std::endl;
01178
01179 exit(1);
01180 }
01181
01182 bool new_display;
01183 file >> new_display;
01184 set_display(new_display);
01185
01186 file >> word;
01187
01188 if(word != "</Display>")
01189 {
01190 std::cerr << "Flood Error: Perceptron class." << std::endl
01191 << "void load(const char*) method." << std::endl
01192 << "Unknown display end tag: " << line << std::endl;
01193
01194 exit(1);
01195 }
01196
01197 file.close();
01198 }
01199
01200
01201
01202
01206
01207 double Perceptron::calculate_random_uniform(double minimum, double maximum)
01208 {
01209 double random = (double)rand()/(RAND_MAX+1.0);
01210
01211 double random_uniform = minimum + (maximum-minimum)*random;
01212
01213 return(random_uniform);
01214 }
01215
01216
01217
01218
01222
01223 double Perceptron::calculate_random_normal(double mean, double standard_deviation)
01224 {
01225 double random_normal = 0.0;
01226
01227 const double pi = 4.0*atan(1.0);
01228
01229 double random_uniform_1;
01230 double random_uniform_2;
01231
01232 do
01233 {
01234 random_uniform_1 = (double)rand()/(RAND_MAX+1.0);
01235
01236 }while(random_uniform_1 == 0.0);
01237
01238 random_uniform_2 = (double)rand()/(RAND_MAX+1.0);
01239
01240
01241
01242 random_normal = mean + sqrt(-2.0*log(random_uniform_1))*sin(2.0*pi*random_uniform_2)*standard_deviation;
01243
01244 return(random_normal);
01245 }
01246
01247
01248 }
01249
01250
01251
01252
01253
01254
01255
01256
01257
01258
01259
01260
01261
01262
01263
01264
01265
01266
01267