///////////////////////////////////////////////////////////////////////////
//
// TMultiLayerPerceptron
//
// A multilayer perceptron (feed-forward neural network) working on data
// stored in a TTree. The network is described by a layout string such as
// "in1,in2:N1:N2:out1,out2": input and output neurons are TTree branch
// names (or TTreeFormula expressions), the Ni give the size of each
// hidden layer, a leading '@' on a variable requests its normalisation,
// and a trailing '!' selects a classification output (sigmoid for one
// output neuron, softmax for several). Several learning methods are
// implemented: stochastic and batch gradient descent, steepest descent
// with line search, Ribiere-Polak and Fletcher-Reeves conjugate
// gradients, and BFGS (the default).
//
///////////////////////////////////////////////////////////////////////////
#include "TMultiLayerPerceptron.h"
#include "TSynapse.h"
#include "TNeuron.h"
#include "TClass.h"
#include "TTree.h"
#include "TEventList.h"
#include "TRandom3.h"
#include "TTimeStamp.h"
#include "TRegexp.h"
#include "TCanvas.h"
#include "TH2.h"
#include "TGraph.h"
#include "TLegend.h"
#include "TMultiGraph.h"
#include "TDirectory.h"
#include "TSystem.h"
#include "Riostream.h"
#include "TMath.h"
#include "TTreeFormula.h"
#include "TTreeFormulaManager.h"
#include "TMarker.h"
#include "TLine.h"
#include "TText.h"
#include "TObjString.h"
#include <stdlib.h>
#include <float.h>   // DBL_EPSILON and DBL_MAX, used by the error functions

ClassImp(TMultiLayerPerceptron)

//______________________________________________________________________________
TMultiLayerPerceptron::TMultiLayerPerceptron()
{
   // Default constructor
   if(!TClass::GetClass("TTreePlayer")) gSystem->Load("libTreePlayer");
   fNetwork.SetOwner(true);
   fFirstLayer.SetOwner(false);
   fLastLayer.SetOwner(false);
   fSynapses.SetOwner(true);
   fData = 0;
   fCurrentTree = -1;
   fCurrentTreeWeight = 1;
   fStructure = "";
   fWeight = "1";
   fTraining = 0;
   fTrainingOwner = false;
   fTest = 0;
   fTestOwner = false;
   fEventWeight = 0;
   fManager = 0;
   fLearningMethod = TMultiLayerPerceptron::kBFGS;
   fEta = .1;
   fEtaDecay = 1;
   fDelta = 0;
   fEpsilon = 0;
   fTau = 3;
   fLastAlpha = 0;
   fReset = 50;
   fType = TNeuron::kSigmoid;
   fOutType = TNeuron::kLinear;
   fextF = "";
   fextD = "";
}

//______________________________________________________________________________
TMultiLayerPerceptron::TMultiLayerPerceptron(const char * layout, TTree * data,
                                             TEventList * training,
                                             TEventList * test,
                                             TNeuron::ENeuronType type,
                                             const char* extF, const char* extD)
{
   // The network is described by a simple string:
   // input and output neurons are given by their (branch) names,
   // separated by comas, while hidden layers are described by their
   // number of neurons; layers are separated by colons.
   // Example: "x,y:10:5:f"  (two inputs, two hidden layers, one output)
   // A variable name may be preceded by '@' to request normalisation of
   // the corresponding branch, and the string may end with '!' to get a
   // classification output (sigmoid for a single output neuron, softmax
   // for several).
   // Input and outputs are taken from the TTree given as second
   // argument; training and test are two TEventLists defining the
   // events to be used during training and testing.
   if(!TClass::GetClass("TTreePlayer")) gSystem->Load("libTreePlayer");
   fNetwork.SetOwner(true);
   fFirstLayer.SetOwner(false);
   fLastLayer.SetOwner(false);
   fSynapses.SetOwner(true);
   fStructure = layout;
   fData = data;
   fCurrentTree = -1;
   fCurrentTreeWeight = 1;
   fTraining = training;
   fTrainingOwner = false;
   fTest = test;
   fTestOwner = false;
   fWeight = "1";
   fType = type;
   fOutType = TNeuron::kLinear;
   fextF = extF;
   fextD = extD;
   fEventWeight = 0;
   fManager = 0;
   if (data) {
      BuildNetwork();
      AttachData();
   }
   fLearningMethod = TMultiLayerPerceptron::kBFGS;
   fEta = .1;
   fEpsilon = 0;
   fDelta = 0;
   fEtaDecay = 1;
   fTau = 3;
   fLastAlpha = 0;
   fReset = 50;
}

//______________________________________________________________________________
TMultiLayerPerceptron::TMultiLayerPerceptron(const char * layout,
                                             const char * weight, TTree * data,
                                             TEventList * training,
                                             TEventList * test,
                                             TNeuron::ENeuronType type,
                                             const char* extF, const char* extD)
{
   // Same as the previous constructor, with an additional weight
   // expression: a branch name or TTreeFormula expression evaluated on
   // the tree, used as per-event weight both in the error computation
   // and in the weight update.
   if(!TClass::GetClass("TTreePlayer")) gSystem->Load("libTreePlayer");
   fNetwork.SetOwner(true);
   fFirstLayer.SetOwner(false);
   fLastLayer.SetOwner(false);
   fSynapses.SetOwner(true);
   fStructure = layout;
   fData = data;
   fCurrentTree = -1;
   fCurrentTreeWeight = 1;
   fTraining = training;
   fTrainingOwner = false;
   fTest = test;
   fTestOwner = false;
   fWeight = weight;
   fType = type;
   fOutType = TNeuron::kLinear;
   fextF = extF;
   fextD = extD;
   fEventWeight = 0;
   fManager = 0;
   if (data) {
      BuildNetwork();
      AttachData();
   }
   fLearningMethod = TMultiLayerPerceptron::kBFGS;
   fEta = .1;
   fEtaDecay = 1;
   fDelta = 0;
   fEpsilon = 0;
   fTau = 3;
   fLastAlpha = 0;
   fReset = 50;
}

//______________________________________________________________________________
TMultiLayerPerceptron::TMultiLayerPerceptron(const char * layout, TTree * data,
                                             const char * training,
                                             const char * test,
                                             TNeuron::ENeuronType type,
                                             const char* extF, const char* extD)
{
   // Same layout and tree as above, but the training and test datasets
   // are defined by cut strings (see TTree::Draw) instead of
   // TEventLists. The two TEventLists are built internally; if the test
   // cut is empty, "!(training cut)" is used.
   if(!TClass::GetClass("TTreePlayer")) gSystem->Load("libTreePlayer");
   fNetwork.SetOwner(true);
   fFirstLayer.SetOwner(false);
   fLastLayer.SetOwner(false);
   fSynapses.SetOwner(true);
   fStructure = layout;
   fData = data;
   fCurrentTree = -1;
   fCurrentTreeWeight = 1;
   fTraining = new TEventList(Form("fTrainingList_%lu",(ULong_t)this));
   fTrainingOwner = true;
   fTest = new TEventList(Form("fTestList_%lu",(ULong_t)this));
   fTestOwner = true;
   fWeight = "1";
   TString testcut = test;
   if(testcut=="") testcut = Form("!(%s)",training);
   fType = type;
   fOutType = TNeuron::kLinear;
   fextF = extF;
   fextD = extD;
   fEventWeight = 0;
   fManager = 0;
   if (data) {
      BuildNetwork();
      data->Draw(Form(">>fTrainingList_%lu",(ULong_t)this),training,"goff");
      data->Draw(Form(">>fTestList_%lu",(ULong_t)this),(const char *)testcut,"goff");
      AttachData();
   }
   else {
      Warning("TMultiLayerPerceptron::TMultiLayerPerceptron","Data not set. Cannot define datasets");
   }
   fLearningMethod = TMultiLayerPerceptron::kBFGS;
   fEta = .1;
   fEtaDecay = 1;
   fDelta = 0;
   fEpsilon = 0;
   fTau = 3;
   fLastAlpha = 0;
   fReset = 50;
}

//______________________________________________________________________________
TMultiLayerPerceptron::TMultiLayerPerceptron(const char * layout,
                                             const char * weight, TTree * data,
                                             const char * training,
                                             const char * test,
                                             TNeuron::ENeuronType type,
                                             const char* extF, const char* extD)
{
   // Full constructor: layout string, event weight expression, tree and
   // cut strings (see TTree::Draw) defining the training and test
   // datasets. If the test cut is empty, "!(training cut)" is used.
   if(!TClass::GetClass("TTreePlayer")) gSystem->Load("libTreePlayer");
   fNetwork.SetOwner(true);
   fFirstLayer.SetOwner(false);
   fLastLayer.SetOwner(false);
   fSynapses.SetOwner(true);
   fStructure = layout;
   fData = data;
   fCurrentTree = -1;
   fCurrentTreeWeight = 1;
   fTraining = new TEventList(Form("fTrainingList_%lu",(ULong_t)this));
   fTrainingOwner = true;
   fTest = new TEventList(Form("fTestList_%lu",(ULong_t)this));
   fTestOwner = true;
   fWeight = weight;
   TString testcut = test;
   if(testcut=="") testcut = Form("!(%s)",training);
   fType = type;
   fOutType = TNeuron::kLinear;
   fextF = extF;
   fextD = extD;
   fEventWeight = 0;
   fManager = 0;
   if (data) {
      BuildNetwork();
      data->Draw(Form(">>fTrainingList_%lu",(ULong_t)this),training,"goff");
      data->Draw(Form(">>fTestList_%lu",(ULong_t)this),(const char *)testcut,"goff");
      AttachData();
   }
   else {
      Warning("TMultiLayerPerceptron::TMultiLayerPerceptron","Data not set. Cannot define datasets");
   }
   fLearningMethod = TMultiLayerPerceptron::kBFGS;
   fEta = .1;
   fEtaDecay = 1;
   fDelta = 0;
   fEpsilon = 0;
   fTau = 3;
   fLastAlpha = 0;
   fReset = 50;
}
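
// Construction sketch (illustrative only; assumes a TTree* "tree" with
// branches x, y and type, type being the quantity to learn):
//
//   TMultiLayerPerceptron mlp("@x,@y:8:type", tree,
//                             "Entry$%2==0", "Entry$%2==1");
//
// Here '@' requests normalisation of the input branches, "8" is a single
// hidden layer of 8 neurons, and the two cuts split the tree into even
// (training) and odd (test) entries.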

//______________________________________________________________________________
TMultiLayerPerceptron::~TMultiLayerPerceptron()
{
   // Destructor
   if(fTraining && fTrainingOwner) delete fTraining;
   if(fTest && fTestOwner) delete fTest;
}

//______________________________________________________________________________
void TMultiLayerPerceptron::SetData(TTree * data)
{
   // Set the data source. Once set, it cannot be changed.
   if (fData) {
      cerr << "Error: data already defined." << endl;
      return;
   }
   fData = data;
   if (data) {
      BuildNetwork();
      AttachData();
   }
}

//______________________________________________________________________________
void TMultiLayerPerceptron::SetEventWeight(const char * branch)
{
   // Set the event weight expression
   fWeight=branch;
   if (fData) {
      if (fEventWeight) {
         fManager->Remove(fEventWeight);
         delete fEventWeight;
      }
      fManager->Add((fEventWeight = new TTreeFormula("NNweight",fWeight.Data(),fData)));
   }
}

//______________________________________________________________________________
void TMultiLayerPerceptron::SetTrainingDataSet(TEventList* train)
{
   // Sets the training dataset.
   // Those events will be used for the minimization.
   if(fTraining && fTrainingOwner) delete fTraining;
   fTraining = train;
   fTrainingOwner = false;
}

//______________________________________________________________________________
void TMultiLayerPerceptron::SetTestDataSet(TEventList* test)
{
   // Sets the test dataset.
   // Those events will not be used for the minimization but for control.
   if(fTest && fTestOwner) delete fTest;
   fTest = test;
   fTestOwner = false;
}

//______________________________________________________________________________
void TMultiLayerPerceptron::SetTrainingDataSet(const char * train)
{
   // Sets the training dataset.
   // Those events will be used for the minimization.
   // Note that the tree must be already defined;
   // train is a cut (see TTree::Draw) defining the dataset.
   if(fTraining && fTrainingOwner) delete fTraining;
   fTraining = new TEventList(Form("fTrainingList_%lu",(ULong_t)this));
   fTrainingOwner = true;
   if (fData) {
      fData->Draw(Form(">>fTrainingList_%lu",(ULong_t)this),train,"goff");
   }
   else {
      Warning("TMultiLayerPerceptron::TMultiLayerPerceptron","Data not set. Cannot define datasets");
   }
}

//______________________________________________________________________________
void TMultiLayerPerceptron::SetTestDataSet(const char * test)
{
   // Sets the test dataset.
   // Those events will not be used for the minimization but for control.
   // Note that the tree must be already defined;
   // test is a cut (see TTree::Draw) defining the dataset.
   if(fTest && fTestOwner) {delete fTest; fTest=0;}
   if(fTest) if(strncmp(fTest->GetName(),Form("fTestList_%lu",(ULong_t)this),10)) delete fTest;
   fTest = new TEventList(Form("fTestList_%lu",(ULong_t)this));
   fTestOwner = true;
   if (fData) {
      fData->Draw(Form(">>fTestList_%lu",(ULong_t)this),test,"goff");
   }
   else {
      Warning("TMultiLayerPerceptron::TMultiLayerPerceptron","Data not set. Cannot define datasets");
   }
}

//______________________________________________________________________________
void TMultiLayerPerceptron::SetLearningMethod(TMultiLayerPerceptron::ELearningMethod method)
{
   // Sets the learning method.
   // Available methods are: kStochastic, kBatch,
   // kSteepestDescent, kRibierePolak, kFletcherReeves and kBFGS
   // (the default).
   fLearningMethod = method;
}

//______________________________________________________________________________
void TMultiLayerPerceptron::SetEta(Double_t eta)
{
   // Sets Eta, the learning rate used in the stochastic and batch
   // minimisations.
   fEta = eta;
}

//______________________________________________________________________________
void TMultiLayerPerceptron::SetEpsilon(Double_t eps)
{
   // Sets Epsilon, the momentum term used in the stochastic and batch
   // minimisations.
   fEpsilon = eps;
}

//______________________________________________________________________________
void TMultiLayerPerceptron::SetDelta(Double_t delta)
{
   // Sets Delta, the flat offset added to the gradient in the stochastic
   // and batch minimisations.
   fDelta = delta;
}

//______________________________________________________________________________
void TMultiLayerPerceptron::SetEtaDecay(Double_t ed)
{
   // Sets EtaDecay, the multiplicative factor applied to Eta at each
   // training epoch.
   fEtaDecay = ed;
}

//______________________________________________________________________________
void TMultiLayerPerceptron::SetTau(Double_t tau)
{
   // Sets Tau, the scaling factor used by the line search.
   fTau = tau;
}

//______________________________________________________________________________
void TMultiLayerPerceptron::SetReset(Int_t reset)
{
   // Sets the number of epochs between two resets of the
   // search direction to the steepest descent
   // (used by the conjugate gradients and BFGS methods).
   fReset = reset;
}
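
// Configuration sketch (illustrative): selecting stochastic minimisation
// and tuning its parameters before training:
//
//   mlp.SetLearningMethod(TMultiLayerPerceptron::kStochastic);
//   mlp.SetEta(0.1);         // learning rate
//   mlp.SetEpsilon(0.3);     // momentum term
//   mlp.SetEtaDecay(0.999);  // eta is multiplied by this after each epoch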

//______________________________________________________________________________
void TMultiLayerPerceptron::GetEntry(Int_t entry) const
{
   // Load an entry into the network
   if (!fData) return;
   fData->GetEntry(entry);
   if (fData->GetTreeNumber() != fCurrentTree) {
      ((TMultiLayerPerceptron*)this)->fCurrentTree = fData->GetTreeNumber();
      fManager->Notify();
      ((TMultiLayerPerceptron*)this)->fCurrentTreeWeight = fData->GetWeight();
   }
   Int_t nentries = fNetwork.GetEntriesFast();
   for (Int_t i=0;i<nentries;i++) {
      TNeuron *neuron = (TNeuron *)fNetwork.UncheckedAt(i);
      neuron->SetNewEvent();
   }
}

//______________________________________________________________________________
void TMultiLayerPerceptron::Train(Int_t nEpoch, Option_t * option, Double_t minE)
{
   // Train the network.
   // nEpoch is the number of iterations.
   // option can contain:
   // - "text" (simple text output)
   // - "graph" (evolving graphical training curves)
   // - "update=X" (step for the text/graph output update)
   // - "+" (skip the randomisation and start from the current weights)
   // - "current" (draw in the current canvas)
   // - "minErrorTrain" (stop when the error on the training sample gets below minE)
   // - "minErrorTest" (stop when the error on the test sample gets below minE)
   // All combinations are available.
   Int_t i;
   TString opt = option;
   opt.ToLower();
   // parse the options
   Int_t verbosity = 0;
   Bool_t newCanvas = true;
   Bool_t minE_Train = false;
   Bool_t minE_Test  = false;
   if (opt.Contains("text"))
      verbosity += 1;
   if (opt.Contains("graph"))
      verbosity += 2;
   Int_t displayStepping = 1;
   if (opt.Contains("update=")) {
      TRegexp reg("update=[0-9]*");
      TString out = opt(reg);
      displayStepping = atoi(out.Data() + 7);
   }
   if (opt.Contains("current"))
      newCanvas = false;
   if (opt.Contains("minerrortrain"))
      minE_Train = true;
   if (opt.Contains("minerrortest"))
      minE_Test = true;
   TVirtualPad *canvas = 0;
   TMultiGraph *residual_plot = 0;
   TGraph *train_residual_plot = 0;
   TGraph *test_residual_plot = 0;
   if ((!fData) || (!fTraining) || (!fTest)) {
      Error("Train","Training/Test samples still not defined. Cannot train the neural network");
      return;
   }
   Info("Train","Using %d train and %d test entries.",
        fTraining->GetN(), fTest->GetN());
   // text and graph output booking
   if (verbosity % 2)
      cout << "Training the Neural Network" << endl;
   if (verbosity / 2) {
      residual_plot = new TMultiGraph;
      if(newCanvas)
         canvas = new TCanvas("NNtraining", "Neural Net training");
      else {
         canvas = gPad;
         if(!canvas) canvas = new TCanvas("NNtraining", "Neural Net training");
      }
      train_residual_plot = new TGraph(nEpoch);
      test_residual_plot = new TGraph(nEpoch);
      canvas->SetLeftMargin(0.14);
      train_residual_plot->SetLineColor(4);
      test_residual_plot->SetLineColor(2);
      residual_plot->Add(train_residual_plot);
      residual_plot->Add(test_residual_plot);
      residual_plot->Draw("LA");
      residual_plot->GetXaxis()->SetTitle("Epoch");
      residual_plot->GetYaxis()->SetTitle("Error");
   }
   // if the option "+" is not set, the network is initialized randomly
   if (!opt.Contains("+"))
      Randomize();
   // initialisation
   fLastAlpha = 0;
   Int_t els = fNetwork.GetEntriesFast() + fSynapses.GetEntriesFast();
   Double_t *buffer = new Double_t[els];
   Double_t *dir = new Double_t[els];
   for (i = 0; i < els; i++)
      buffer[i] = 0;
   Int_t matrix_size = fLearningMethod==TMultiLayerPerceptron::kBFGS ? els : 1;
   TMatrixD bfgsh(matrix_size, matrix_size);
   TMatrixD gamma(matrix_size, 1);
   TMatrixD delta(matrix_size, 1);
   // epoch loop: the training itself
   Double_t training_E = 1e10;
   Double_t test_E = 1e10;
   for (Int_t iepoch = 0; (iepoch < nEpoch) && (!minE_Train || training_E>minE) && (!minE_Test || test_E>minE) ; iepoch++) {
      switch (fLearningMethod) {
      case TMultiLayerPerceptron::kStochastic:
         {
            MLP_Stochastic(buffer);
            break;
         }
      case TMultiLayerPerceptron::kBatch:
         {
            ComputeDEDw();
            MLP_Batch(buffer);
            break;
         }
      case TMultiLayerPerceptron::kSteepestDescent:
         {
            ComputeDEDw();
            SteepestDir(dir);
            if (LineSearch(dir, buffer))
               MLP_Batch(buffer);
            break;
         }
      case TMultiLayerPerceptron::kRibierePolak:
         {
            ComputeDEDw();
            if (!(iepoch % fReset)) {
               SteepestDir(dir);
            } else {
               // Ribiere-Polak: beta built from the new gradient and the
               // previous search direction
               Double_t norm = 0;
               Double_t onorm = 0;
               for (i = 0; i < els; i++)
                  onorm += dir[i] * dir[i];
               Double_t prod = 0;
               Int_t idx = 0;
               TNeuron *neuron = 0;
               TSynapse *synapse = 0;
               Int_t nentries = fNetwork.GetEntriesFast();
               for (i=0;i<nentries;i++) {
                  neuron = (TNeuron *) fNetwork.UncheckedAt(i);
                  prod -= dir[idx++] * neuron->GetDEDw();
                  norm += neuron->GetDEDw() * neuron->GetDEDw();
               }
               nentries = fSynapses.GetEntriesFast();
               for (i=0;i<nentries;i++) {
                  synapse = (TSynapse *) fSynapses.UncheckedAt(i);
                  prod -= dir[idx++] * synapse->GetDEDw();
                  norm += synapse->GetDEDw() * synapse->GetDEDw();
               }
               ConjugateGradientsDir(dir, (norm - prod) / onorm);
            }
            if (LineSearch(dir, buffer))
               MLP_Batch(buffer);
            break;
         }
      case TMultiLayerPerceptron::kFletcherReeves:
         {
            ComputeDEDw();
            if (!(iepoch % fReset)) {
               SteepestDir(dir);
            } else {
               // Fletcher-Reeves: beta = ||g||^2 / ||previous dir||^2
               Double_t norm = 0;
               Double_t onorm = 0;
               for (i = 0; i < els; i++)
                  onorm += dir[i] * dir[i];
               TNeuron *neuron = 0;
               TSynapse *synapse = 0;
               Int_t nentries = fNetwork.GetEntriesFast();
               for (i=0;i<nentries;i++) {
                  neuron = (TNeuron *) fNetwork.UncheckedAt(i);
                  norm += neuron->GetDEDw() * neuron->GetDEDw();
               }
               nentries = fSynapses.GetEntriesFast();
               for (i=0;i<nentries;i++) {
                  synapse = (TSynapse *) fSynapses.UncheckedAt(i);
                  norm += synapse->GetDEDw() * synapse->GetDEDw();
               }
               ConjugateGradientsDir(dir, norm / onorm);
            }
            if (LineSearch(dir, buffer))
               MLP_Batch(buffer);
            break;
         }
      case TMultiLayerPerceptron::kBFGS:
         {
            SetGammaDelta(gamma, delta, buffer);
            if (!(iepoch % fReset)) {
               SteepestDir(dir);
               bfgsh.UnitMatrix();
            } else {
               if (GetBFGSH(bfgsh, gamma, delta)) {
                  SteepestDir(dir);
                  bfgsh.UnitMatrix();
               } else {
                  BFGSDir(bfgsh, dir);
               }
            }
            if (DerivDir(dir) > 0) {
               SteepestDir(dir);
               bfgsh.UnitMatrix();
            }
            if (LineSearch(dir, buffer)) {
               bfgsh.UnitMatrix();
               SteepestDir(dir);
               if (LineSearch(dir, buffer)) {
                  Error("TMultiLayerPerceptron::Train()","Line search failed");
                  iepoch = nEpoch;
               }
            }
            break;
         }
      }
      // security: stop if the error became NaN
      if (TMath::IsNaN(GetError(TMultiLayerPerceptron::kTraining))) {
         Error("TMultiLayerPerceptron::Train()","Stop.");
         iepoch = nEpoch;
      }
      // process other ROOT events and compute the errors for this epoch
      gSystem->ProcessEvents();
      training_E = TMath::Sqrt(GetError(TMultiLayerPerceptron::kTraining) / fTraining->GetN());
      test_E = TMath::Sqrt(GetError(TMultiLayerPerceptron::kTest) / fTest->GetN());
      // intermediate text and graph output
      if ((verbosity % 2) && ((!(iepoch % displayStepping)) || (iepoch == nEpoch - 1))) {
         cout << "Epoch: " << iepoch
              << " learn=" << training_E
              << " test=" << test_E
              << endl;
      }
      if (verbosity / 2) {
         train_residual_plot->SetPoint(iepoch, iepoch,training_E);
         test_residual_plot->SetPoint(iepoch, iepoch,test_E);
         if (!iepoch) {
            // pre-fill the remaining points so that the graph range is sensible
            Double_t trp = train_residual_plot->GetY()[iepoch];
            Double_t tep = test_residual_plot->GetY()[iepoch];
            for (i = 1; i < nEpoch; i++) {
               train_residual_plot->SetPoint(i, i, trp);
               test_residual_plot->SetPoint(i, i, tep);
            }
         }
         if ((!(iepoch % displayStepping)) || (iepoch == nEpoch - 1)) {
            residual_plot->GetYaxis()->UnZoom();
            residual_plot->GetYaxis()->SetTitleOffset(1.4);
            residual_plot->GetYaxis()->SetDecimals();
            canvas->Modified();
            canvas->Update();
         }
      }
   }
   // cleaning
   delete [] buffer;
   delete [] dir;
   // final monitoring
   if (verbosity % 2)
      cout << "Training done." << endl;
   if (verbosity / 2) {
      TLegend *legend = new TLegend(.75, .80, .95, .95);
      legend->AddEntry(residual_plot->GetListOfGraphs()->At(0),
                       "Training sample", "L");
      legend->AddEntry(residual_plot->GetListOfGraphs()->At(1),
                       "Test sample", "L");
      legend->Draw();
   }
}
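
// Training sketch (illustrative):
//
//   mlp.Train(200, "text,graph,update=10");  // fresh start, weights randomised
//   mlp.Train(100, "+text");                 // continue from the current weights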

//______________________________________________________________________________
Double_t TMultiLayerPerceptron::Result(Int_t event, Int_t index) const
{
   // Computes the output for a given event.
   // Look at the output neuron designated by index.
   GetEntry(event);
   TNeuron *out = (TNeuron *) (fLastLayer.At(index));
   if (out)
      return out->GetValue();
   else
      return 0;
}

//______________________________________________________________________________
Double_t TMultiLayerPerceptron::GetError(Int_t event) const
{
   // Error on the output for a given event
   GetEntry(event);
   Double_t error = 0;
   // choose the error function according to the output neuron type
   Int_t nEntries = fLastLayer.GetEntriesFast();
   if (nEntries == 0) return 0.0;
   switch (fOutType) {
   case (TNeuron::kSigmoid):
      error = GetCrossEntropyBinary();
      break;
   case (TNeuron::kSoftmax):
      error = GetCrossEntropy();
      break;
   case (TNeuron::kLinear):
      error = GetSumSquareError();
      break;
   default:
      // default to the sum-square error
      error = GetSumSquareError();
   }
   error *= fEventWeight->EvalInstance();
   error *= fCurrentTreeWeight;
   return error;
}

//______________________________________________________________________________
Double_t TMultiLayerPerceptron::GetError(TMultiLayerPerceptron::EDataSet set) const
{
   // Error on the whole dataset
   TEventList *list =
       ((set == TMultiLayerPerceptron::kTraining) ? fTraining : fTest);
   Double_t error = 0;
   Int_t i;
   if (list) {
      Int_t nEvents = list->GetN();
      for (i = 0; i < nEvents; i++) {
         error += GetError(list->GetEntry(i));
      }
   } else if (fData) {
      Int_t nEvents = (Int_t) fData->GetEntries();
      for (i = 0; i < nEvents; i++) {
         error += GetError(i);
      }
   }
   return error;
}

//______________________________________________________________________________
Double_t TMultiLayerPerceptron::GetSumSquareError() const
{
   // Sum-square error on the output for a given event
   Double_t error = 0;
   for (Int_t i = 0; i < fLastLayer.GetEntriesFast(); i++) {
      TNeuron *neuron = (TNeuron *) fLastLayer[i];
      error += neuron->GetError() * neuron->GetError();
   }
   return (error / 2.);
}
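
// For reference, the quantity computed above is the usual sum-of-squares
// error for one event,
//    E = (1/2) * Sum_i (e_i)^2
// where e_i is the residual returned by TNeuron::GetError() for output
// neuron i (assumed here to be the difference between the neuron output
// and its target).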

//______________________________________________________________________________
Double_t TMultiLayerPerceptron::GetCrossEntropyBinary() const
{
   // Cross entropy error for sigmoid output neurons, for a given event
   Double_t error = 0;
   for (Int_t i = 0; i < fLastLayer.GetEntriesFast(); i++) {
      TNeuron *neuron = (TNeuron *) fLastLayer[i];
      Double_t output = neuron->GetValue();
      Double_t target = neuron->GetTarget();
      if (target < DBL_EPSILON) {
         if (output == 1.0)
            error = DBL_MAX;
         else
            error -= TMath::Log(1 - output);
      } else
      if ((1 - target) < DBL_EPSILON) {
         if (output == 0.0)
            error = DBL_MAX;
         else
            error -= TMath::Log(output);
      } else {
         if (output == 0.0 || output == 1.0)
            error = DBL_MAX;
         else
            error -= target * TMath::Log(output / target) + (1-target) * TMath::Log((1 - output)/(1 - target));
      }
   }
   return error;
}
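
// In formula form, the loop above computes, per output neuron with output
// o and target t (0 <= t <= 1),
//    E = - Sum_i [ t_i * ln(o_i/t_i) + (1-t_i) * ln((1-o_i)/(1-t_i)) ]
// with the t=0 and t=1 limits handled explicitly; this Kullback-Leibler
// form vanishes when o equals t.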

//______________________________________________________________________________
Double_t TMultiLayerPerceptron::GetCrossEntropy() const
{
   // Cross entropy error for a softmax output neuron, for a given event
   Double_t error = 0;
   for (Int_t i = 0; i < fLastLayer.GetEntriesFast(); i++) {
      TNeuron *neuron = (TNeuron *) fLastLayer[i];
      Double_t output = neuron->GetValue();
      Double_t target = neuron->GetTarget();
      if (target > DBL_EPSILON) {
         if (output == 0.0)
            error = DBL_MAX;
         else
            error -= target * TMath::Log(output / target);
      }
   }
   return error;
}
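
// Correspondingly, for the softmax case the error computed above is
//    E = - Sum_i t_i * ln(o_i/t_i)
// summed over the output neurons; it is again zero when each output o_i
// matches its target t_i.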

//______________________________________________________________________________
void TMultiLayerPerceptron::ComputeDEDw() const
{
   // Compute the DEDw: the sum over all training events of dedw for each
   // weight, normalized by the number of events.
   Int_t i,j;
   Int_t nentries = fSynapses.GetEntriesFast();
   TSynapse *synapse;
   for (i=0;i<nentries;i++) {
      synapse = (TSynapse *) fSynapses.UncheckedAt(i);
      synapse->SetDEDw(0.);
   }
   TNeuron *neuron;
   nentries = fNetwork.GetEntriesFast();
   for (i=0;i<nentries;i++) {
      neuron = (TNeuron *) fNetwork.UncheckedAt(i);
      neuron->SetDEDw(0.);
   }
   Double_t eventWeight = 1.;
   if (fTraining) {
      Int_t nEvents = fTraining->GetN();
      for (i = 0; i < nEvents; i++) {
         GetEntry(fTraining->GetEntry(i));
         eventWeight = fEventWeight->EvalInstance();
         eventWeight *= fCurrentTreeWeight;
         nentries = fSynapses.GetEntriesFast();
         for (j=0;j<nentries;j++) {
            synapse = (TSynapse *) fSynapses.UncheckedAt(j);
            synapse->SetDEDw(synapse->GetDEDw() + (synapse->GetDeDw()*eventWeight));
         }
         nentries = fNetwork.GetEntriesFast();
         for (j=0;j<nentries;j++) {
            neuron = (TNeuron *) fNetwork.UncheckedAt(j);
            neuron->SetDEDw(neuron->GetDEDw() + (neuron->GetDeDw()*eventWeight));
         }
      }
      nentries = fSynapses.GetEntriesFast();
      for (j=0;j<nentries;j++) {
         synapse = (TSynapse *) fSynapses.UncheckedAt(j);
         synapse->SetDEDw(synapse->GetDEDw() / (Double_t) nEvents);
      }
      nentries = fNetwork.GetEntriesFast();
      for (j=0;j<nentries;j++) {
         neuron = (TNeuron *) fNetwork.UncheckedAt(j);
         neuron->SetDEDw(neuron->GetDEDw() / (Double_t) nEvents);
      }
   } else if (fData) {
      Int_t nEvents = (Int_t) fData->GetEntries();
      for (i = 0; i < nEvents; i++) {
         GetEntry(i);
         eventWeight = fEventWeight->EvalInstance();
         eventWeight *= fCurrentTreeWeight;
         nentries = fSynapses.GetEntriesFast();
         for (j=0;j<nentries;j++) {
            synapse = (TSynapse *) fSynapses.UncheckedAt(j);
            synapse->SetDEDw(synapse->GetDEDw() + (synapse->GetDeDw()*eventWeight));
         }
         nentries = fNetwork.GetEntriesFast();
         for (j=0;j<nentries;j++) {
            neuron = (TNeuron *) fNetwork.UncheckedAt(j);
            neuron->SetDEDw(neuron->GetDEDw() + (neuron->GetDeDw()*eventWeight));
         }
      }
      nentries = fSynapses.GetEntriesFast();
      for (j=0;j<nentries;j++) {
         synapse = (TSynapse *) fSynapses.UncheckedAt(j);
         synapse->SetDEDw(synapse->GetDEDw() / (Double_t) nEvents);
      }
      nentries = fNetwork.GetEntriesFast();
      for (j=0;j<nentries;j++) {
         neuron = (TNeuron *) fNetwork.UncheckedAt(j);
         neuron->SetDEDw(neuron->GetDEDw() / (Double_t) nEvents);
      }
   }
}

//______________________________________________________________________________
void TMultiLayerPerceptron::Randomize() const
{
   // Randomize the weights
   Int_t nentries = fSynapses.GetEntriesFast();
   Int_t j;
   TSynapse *synapse;
   TNeuron *neuron;
   TTimeStamp ts;
   TRandom3 gen(ts.GetSec());
   for (j=0;j<nentries;j++) {
      synapse = (TSynapse *) fSynapses.UncheckedAt(j);
      synapse->SetWeight(gen.Rndm() - 0.5);
   }
   nentries = fNetwork.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      neuron = (TNeuron *) fNetwork.UncheckedAt(j);
      neuron->SetWeight(gen.Rndm() - 0.5);
   }
}

//______________________________________________________________________________
void TMultiLayerPerceptron::AttachData()
{
   // Connects the TTree to Neurons in the input and output layers.
   // The formulas associated to each neuron are created and reported
   // to the network formula manager.
   // By default, the branch is not normalised since this would degrade
   // performance for classification jobs.
   // Normalisation can be requested by putting '@' in front of the formula.
   Int_t j = 0;
   TNeuron *neuron = 0;
   Bool_t normalize = false;
   fManager = new TTreeFormulaManager;
   // input layer
   const TString input = TString(fStructure(0, fStructure.First(':')));
   const TObjArray *inpL = input.Tokenize(", ");
   Int_t nentries = fFirstLayer.GetEntriesFast();
   // make sure nentries == entries in inpL
   R__ASSERT(nentries == inpL->GetLast()+1);
   for (j=0;j<nentries;j++) {
      normalize = false;
      const TString brName = ((TObjString *)inpL->At(j))->GetString();
      neuron = (TNeuron *) fFirstLayer.UncheckedAt(j);
      if (brName[0]=='@')
         normalize = true;
      fManager->Add(neuron->UseBranch(fData,brName.Data() + (normalize?1:0)));
      if(!normalize) neuron->SetNormalisation(0., 1.);
   }
   delete inpL;

   // output layer
   TString output = TString(
           fStructure(fStructure.Last(':') + 1,
                      fStructure.Length() - fStructure.Last(':')));
   const TObjArray *outL = output.Tokenize(", ");
   nentries = fLastLayer.GetEntriesFast();
   // make sure nentries == entries in outL
   R__ASSERT(nentries == outL->GetLast()+1);
   for (j=0;j<nentries;j++) {
      normalize = false;
      const TString brName = ((TObjString *)outL->At(j))->GetString();
      neuron = (TNeuron *) fLastLayer.UncheckedAt(j);
      if (brName[0]=='@')
         normalize = true;
      fManager->Add(neuron->UseBranch(fData,brName.Data() + (normalize?1:0)));
      if(!normalize) neuron->SetNormalisation(0., 1.);
   }
   delete outL;

   // event weight
   fManager->Add((fEventWeight = new TTreeFormula("NNweight",fWeight.Data(),fData)));
}

//______________________________________________________________________________
void TMultiLayerPerceptron::ExpandStructure()
{
   // Expand the structure of the first layer
   TString input = TString(fStructure(0, fStructure.First(':')));
   const TObjArray *inpL = input.Tokenize(", ");
   Int_t nneurons = inpL->GetLast()+1;

   TString hiddenAndOutput = TString(
         fStructure(fStructure.First(':') + 1,
                    fStructure.Length() - fStructure.First(':')));
   TString newInput;
   Int_t i = 0;
   TTreeFormula* f = 0;
   // loop on the input neurons
   for (i = 0; i<nneurons; i++) {
      const TString name = ((TObjString *)inpL->At(i))->GetString();
      f = new TTreeFormula("sizeTestFormula",name,fData);
      // variable-size arrays cannot be expanded
      if(f->GetMultiplicity()==1 && f->GetNdata()>1) {
         Warning("TMultiLayerPerceptron::ExpandStructure()","Variable size arrays cannot be used to build implicitely an input layer. The index 0 will be assumed.");
      }
      // fixed-size array: expand it into one input neuron per element,
      // using the TTreeFormula index operator "name[j]"
      else if(f->GetNdata()>1) {
         for(Int_t j=0; j<f->GetNdata(); j++) {
            if(i||j) newInput += ",";
            newInput += name;
            newInput += "[";
            newInput += j;
            newInput += "]";
         }
         continue;
      }
      if(i) newInput += ",";
      newInput += name;
   }
   delete inpL;

   // save the result
   fStructure = newInput + ":" + hiddenAndOutput;
}
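
// Example of the expansion performed above (illustrative): with a branch
// "pos" holding a fixed-size array of 3 floats, the layout "pos:5:type"
// becomes "pos[0],pos[1],pos[2]:5:type", i.e. one input neuron per array
// element.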

//______________________________________________________________________________
void TMultiLayerPerceptron::BuildNetwork()
{
   // Instantiates the network from the description
   ExpandStructure();
   TString input  = TString(fStructure(0, fStructure.First(':')));
   TString hidden = TString(
           fStructure(fStructure.First(':') + 1,
                      fStructure.Last(':') - fStructure.First(':') - 1));
   TString output = TString(
           fStructure(fStructure.Last(':') + 1,
                      fStructure.Length() - fStructure.Last(':')));
   Int_t bll = atoi(TString(
           hidden(hidden.Last(':') + 1,
                  hidden.Length() - (hidden.Last(':') + 1))).Data());
   if (input.Length() == 0) {
      Error("BuildNetwork()","malformed structure. No input layer.");
      return;
   }
   if (output.Length() == 0) {
      Error("BuildNetwork()","malformed structure. No output layer.");
      return;
   }
   BuildFirstLayer(input);
   BuildHiddenLayers(hidden);
   BuildLastLayer(output, bll);
}
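
// To make the parsing above concrete (illustrative values): for the
// layout "x,y:10:5:f", input is "x,y", hidden is "10:5", output is "f",
// and bll, the size of the last hidden layer to be connected to the
// output, is 5.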

//______________________________________________________________________________
void TMultiLayerPerceptron::BuildFirstLayer(TString & input)
{
   // Instantiates the neurons in input.
   // Inputs are normalised and the type is set to kOff
   // (simple forward of the formula value).
   const TObjArray *inpL = input.Tokenize(", ");
   const Int_t nneurons = inpL->GetLast()+1;
   TNeuron *neuron = 0;
   Int_t i = 0;
   for (i = 0; i<nneurons; i++) {
      const TString name = ((TObjString *)inpL->At(i))->GetString();
      neuron = new TNeuron(TNeuron::kOff, name);
      fFirstLayer.AddLast(neuron);
      fNetwork.AddLast(neuron);
   }
   delete inpL;
}

//______________________________________________________________________________
void TMultiLayerPerceptron::BuildHiddenLayers(TString & hidden)
{
   // Builds hidden layers.
   Int_t beg = 0;
   Int_t end = hidden.Index(":", beg + 1);
   Int_t prevStart = 0;
   Int_t prevStop = fNetwork.GetEntriesFast();
   Int_t layer = 1;
   while (end != -1) {
      BuildOneHiddenLayer(hidden(beg, end - beg), layer, prevStart, prevStop, false);
      beg = end + 1;
      end = hidden.Index(":", beg + 1);
   }
   // the last hidden layer
   BuildOneHiddenLayer(hidden(beg, hidden.Length() - beg), layer, prevStart, prevStop, true);
}

//______________________________________________________________________________
void TMultiLayerPerceptron::BuildOneHiddenLayer(const TString& sNumNodes, Int_t& layer,
                                                Int_t& prevStart, Int_t& prevStop,
                                                Bool_t lastLayer)
{
   // Builds a hidden layer, updates the number of layers.
   TNeuron *neuron = 0;
   TSynapse *synapse = 0;
   TString name;
   if (!sNumNodes.IsAlnum() || sNumNodes.IsAlpha()) {
      Error("BuildOneHiddenLayer",
            "The specification '%s' for hidden layer %d must contain only numbers!",
            sNumNodes.Data(), layer - 1);
   } else {
      Int_t num = atoi(sNumNodes.Data());
      for (Int_t i = 0; i < num; i++) {
         name.Form("HiddenL%d:N%d",layer,i);
         neuron = new TNeuron(fType, name, "", (const char*)fextF, (const char*)fextD);
         fNetwork.AddLast(neuron);
         for (Int_t j = prevStart; j < prevStop; j++) {
            synapse = new TSynapse((TNeuron *) fNetwork[j], neuron);
            fSynapses.AddLast(synapse);
         }
      }

      if (!lastLayer) {
         // tell each neuron which ones are in its own layer (for Softmax)
         Int_t nEntries = fNetwork.GetEntriesFast();
         for (Int_t i = prevStop; i < nEntries; i++) {
            neuron = (TNeuron *) fNetwork[i];
            for (Int_t j = prevStop; j < nEntries; j++)
               neuron->AddInLayer((TNeuron *) fNetwork[j]);
         }
      }

      prevStart = prevStop;
      prevStop = fNetwork.GetEntriesFast();
      layer++;
   }
}

//______________________________________________________________________________
void TMultiLayerPerceptron::BuildLastLayer(TString & output, Int_t prev)
{
   // Builds the output layer.
   // Neurons are linear combinations of input, by default.
   // If the structure ends with "!", the neurons are set up for
   // classification, ie. with a sigmoid (1 neuron) or softmax (more
   // neurons) activation function.
   Int_t nneurons = output.CountChar(',')+1;
   if (fStructure.EndsWith("!")) {
      fStructure = TString(fStructure(0, fStructure.Length() - 1));  // remove "!"
      if (nneurons == 1)
         fOutType = TNeuron::kSigmoid;
      else
         fOutType = TNeuron::kSoftmax;
   }
   Int_t prevStop = fNetwork.GetEntriesFast();
   Int_t prevStart = prevStop - prev;
   Ssiz_t pos = 0;
   TNeuron *neuron;
   TSynapse *synapse;
   TString name;
   Int_t i,j;
   for (i = 0; i<nneurons; i++) {
      Ssiz_t nextpos=output.Index(",",pos);
      if (nextpos!=kNPOS)
         name=output(pos,nextpos-pos);
      else name=output(pos,output.Length());
      pos = nextpos + 1;
      neuron = new TNeuron(fOutType, name);
      for (j = prevStart; j < prevStop; j++) {
         synapse = new TSynapse((TNeuron *) fNetwork[j], neuron);
         fSynapses.AddLast(synapse);
      }
      fLastLayer.AddLast(neuron);
      fNetwork.AddLast(neuron);
   }
   // tell each neuron which ones are in its own layer (for Softmax)
   Int_t nEntries = fNetwork.GetEntriesFast();
   for (i = prevStop; i < nEntries; i++) {
      neuron = (TNeuron *) fNetwork[i];
      for (j = prevStop; j < nEntries; j++)
         neuron->AddInLayer((TNeuron *) fNetwork[j]);
   }

}

//______________________________________________________________________________
void TMultiLayerPerceptron::DrawResult(Int_t index, Option_t * option) const
{
   // Draws the neural net output.
   // It produces an histogram with the output for the two datasets.
   // Index is the number of the desired output neuron.
   // "option" can contain:
   // - "test" or "train" to select the dataset
   // - "comp" to produce a X-Y comparison plot
   // - "nocanv" to not create a new TCanvas for the plot
   TString opt = option;
   opt.ToLower();
   TNeuron *out = (TNeuron *) (fLastLayer.At(index));
   if (!out) {
      Error("DrawResult()","no such output.");
      return;
   }
   if (!opt.Contains("nocanv"))
      new TCanvas("NNresult", "Neural Net output");
   const Double_t *norm = out->GetNormalisation();
   TEventList *events = 0;
   TString setname;
   Int_t i;
   if (opt.Contains("train")) {
      events = fTraining;
      setname = Form("train%d",index);
   } else if (opt.Contains("test")) {
      events = fTest;
      setname = Form("test%d",index);
   }
   if ((!fData) || (!events)) {
      Error("DrawResult()","no dataset.");
      return;
   }
   if (opt.Contains("comp")) {
      // comparison plot: network output versus target
      TString title = "Neural Net Output control. ";
      title += setname;
      setname = "MLP_" + setname + "_comp";
      TH2D *hist = ((TH2D *) gDirectory->Get(setname.Data()));
      if (!hist)
         hist = new TH2D(setname.Data(), title.Data(), 50, -1, 1, 50, -1, 1);
      hist->Reset();
      Int_t nEvents = events->GetN();
      for (i = 0; i < nEvents; i++) {
         GetEntry(events->GetEntry(i));
         hist->Fill(out->GetValue(), (out->GetBranch() - norm[1]) / norm[0]);
      }
      hist->Draw();
   } else {
      // output distribution
      TString title = "Neural Net Output. ";
      title += setname;
      setname = "MLP_" + setname;
      TH1D *hist = ((TH1D *) gDirectory->Get(setname.Data()));
      if (!hist)
         hist = new TH1D(setname, title, 50, 1, -1);
      hist->Reset();
      Int_t nEvents = events->GetN();
      for (i = 0; i < nEvents; i++)
         hist->Fill(Result(events->GetEntry(i), index));
      hist->Draw();
      if (opt.Contains("train") && opt.Contains("test")) {
         events = fTraining;
         setname = "train";
         hist = ((TH1D *) gDirectory->Get("MLP_test"));
         if (!hist)
            hist = new TH1D(setname, title, 50, 1, -1);
         hist->Reset();
         nEvents = events->GetN();
         for (i = 0; i < nEvents; i++)
            hist->Fill(Result(events->GetEntry(i), index));
         hist->Draw("same");
      }
   }
}

//______________________________________________________________________________
void TMultiLayerPerceptron::DumpWeights(Option_t * filename) const
{
   // Dumps the weights to a text file.
   // Set filename to "-" to dump to the standard output.
   TString filen = filename;
   ostream * output;
   if (filen == "")
      return;
   if (filen == "-")
      output = &cout;
   else
      output = new ofstream(filen.Data());
   TNeuron *neuron = 0;
   *output << "#input normalization" << endl;
   Int_t nentries = fFirstLayer.GetEntriesFast();
   Int_t j=0;
   for (j=0;j<nentries;j++) {
      neuron = (TNeuron *) fFirstLayer.UncheckedAt(j);
      *output << neuron->GetNormalisation()[0] << " "
              << neuron->GetNormalisation()[1] << endl;
   }
   *output << "#output normalization" << endl;
   nentries = fLastLayer.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      neuron = (TNeuron *) fLastLayer.UncheckedAt(j);
      *output << neuron->GetNormalisation()[0] << " "
              << neuron->GetNormalisation()[1] << endl;
   }
   *output << "#neurons weights" << endl;
   TObjArrayIter *it = (TObjArrayIter *) fNetwork.MakeIterator();
   while ((neuron = (TNeuron *) it->Next()))
      *output << neuron->GetWeight() << endl;
   delete it;
   it = (TObjArrayIter *) fSynapses.MakeIterator();
   TSynapse *synapse = 0;
   *output << "#synapses weights" << endl;
   while ((synapse = (TSynapse *) it->Next()))
      *output << synapse->GetWeight() << endl;
   delete it;
   if (filen != "-") {
      ((ofstream *) output)->close();
      delete output;
   }
}

//______________________________________________________________________________
void TMultiLayerPerceptron::LoadWeights(Option_t * filename)
{
   // Loads the weights from a text file conforming to the format
   // defined by DumpWeights.
   TString filen = filename;
   Double_t w;
   if (filen == "")
      return;
   char *buff = new char[100];
   ifstream input(filen.Data());
   // input normalization
   input.getline(buff, 100);
   TObjArrayIter *it = (TObjArrayIter *) fFirstLayer.MakeIterator();
   Float_t n1,n2;
   TNeuron *neuron = 0;
   while ((neuron = (TNeuron *) it->Next())) {
      input >> n1 >> n2;
      neuron->SetNormalisation(n2,n1);
   }
   input.getline(buff, 100);
   // output normalization
   input.getline(buff, 100);
   delete it;
   it = (TObjArrayIter *) fLastLayer.MakeIterator();
   while ((neuron = (TNeuron *) it->Next())) {
      input >> n1 >> n2;
      neuron->SetNormalisation(n2,n1);
   }
   input.getline(buff, 100);
   // neuron weights
   input.getline(buff, 100);
   delete it;
   it = (TObjArrayIter *) fNetwork.MakeIterator();
   while ((neuron = (TNeuron *) it->Next())) {
      input >> w;
      neuron->SetWeight(w);
   }
   delete it;
   input.getline(buff, 100);
   // synapse weights
   input.getline(buff, 100);
   it = (TObjArrayIter *) fSynapses.MakeIterator();
   TSynapse *synapse = 0;
   while ((synapse = (TSynapse *) it->Next())) {
      input >> w;
      synapse->SetWeight(w);
   }
   delete it;
   delete[] buff;
}
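
// Typical dump/load round trip (illustrative; "weights.txt" is an
// arbitrary file name):
//
//   mlp.DumpWeights("weights.txt");   // after training
//   // ... later, on a network with the same layout ...
//   mlp2.LoadWeights("weights.txt");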

//______________________________________________________________________________
Double_t TMultiLayerPerceptron::Evaluate(Int_t index, Double_t *params) const
{
   // Returns the Neural Net output for a given set of input parameters.
   // The number of parameters must equal the number of input neurons.
   TObjArrayIter *it = (TObjArrayIter *) fNetwork.MakeIterator();
   TNeuron *neuron;
   while ((neuron = (TNeuron *) it->Next()))
      neuron->SetNewEvent();
   delete it;
   it = (TObjArrayIter *) fFirstLayer.MakeIterator();
   Int_t i=0;
   while ((neuron = (TNeuron *) it->Next()))
      neuron->ForceExternalValue(params[i++]);
   delete it;
   TNeuron *out = (TNeuron *) (fLastLayer.At(index));
   if (out)
      return out->GetValue();
   else
      return 0;
}
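
// Minimal evaluation sketch (assuming a network with two input neurons
// and at least one output):
//
//   Double_t params[2] = {1.2, 3.4};          // one value per input neuron
//   Double_t nnout = mlp.Evaluate(0, params); // value of output neuron 0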

//______________________________________________________________________________
void TMultiLayerPerceptron::Export(Option_t * filename, Option_t * language) const
{
   // Exports the NN as a function for any non-ROOT-dependant code.
   // Supported languages are: only C++, FORTRAN and Python (yet).
   // This feature is also useful if you want to plot the NN as a
   // function (TF1 or TF2).
   TString lg = language;
   lg.ToUpper();
   Int_t i;
   if(GetType()==TNeuron::kExternal) {
      Warning("TMultiLayerPerceptron::Export","Request to export a network using an external function");
   }
   if (lg == "C++") {
      TString basefilename = filename;
      Int_t slash = basefilename.Last('/')+1;
      if (slash) basefilename = TString(basefilename(slash, basefilename.Length()-slash));

      TString classname = basefilename;
      TString header = filename;
      header += ".h";
      TString source = filename;
      source += ".cxx";
      ofstream headerfile(header);
      ofstream sourcefile(source);
      headerfile << "#ifndef " << basefilename << "_h" << endl;
      headerfile << "#define " << basefilename << "_h" << endl << endl;
      headerfile << "class " << classname << " { " << endl;
      headerfile << "public:" << endl;
      headerfile << "   " << classname << "() {}" << endl;
      headerfile << "   ~" << classname << "() {}" << endl;
      sourcefile << "#include \"" << header << "\"" << endl;
      sourcefile << "#include <cmath>" << endl << endl;
      headerfile << "   double Value(int index";
      sourcefile << "double " << classname << "::Value(int index";
      for (i = 0; i < fFirstLayer.GetEntriesFast(); i++) {
         headerfile << ",double in" << i;
         sourcefile << ",double in" << i;
      }
      headerfile << ");" << endl;
      sourcefile << ") {" << endl;
      for (i = 0; i < fFirstLayer.GetEntriesFast(); i++)
         sourcefile << "   input" << i << " = (in" << i << " - "
             << ((TNeuron *) fFirstLayer[i])->GetNormalisation()[1] << ")/"
             << ((TNeuron *) fFirstLayer[i])->GetNormalisation()[0] << ";"
             << endl;
      sourcefile << "   switch(index) {" << endl;
      TNeuron *neuron;
      TObjArrayIter *it = (TObjArrayIter *) fLastLayer.MakeIterator();
      Int_t idx = 0;
      while ((neuron = (TNeuron *) it->Next()))
         sourcefile << "     case " << idx++ << ":" << endl
                    << "         return neuron" << neuron << "();" << endl;
      sourcefile << "     default:" << endl
                 << "         return 0.;" << endl << "   }"
                 << endl;
      sourcefile << "}" << endl << endl;
      headerfile << "   double Value(int index, double* input);" << endl;
      sourcefile << "double " << classname << "::Value(int index, double* input) {" << endl;
      for (i = 0; i < fFirstLayer.GetEntriesFast(); i++)
         sourcefile << "   input" << i << " = (input[" << i << "] - "
             << ((TNeuron *) fFirstLayer[i])->GetNormalisation()[1] << ")/"
             << ((TNeuron *) fFirstLayer[i])->GetNormalisation()[0] << ";"
             << endl;
      sourcefile << "   switch(index) {" << endl;
      delete it;
      it = (TObjArrayIter *) fLastLayer.MakeIterator();
      idx = 0;
      while ((neuron = (TNeuron *) it->Next()))
         sourcefile << "     case " << idx++ << ":" << endl
                    << "         return neuron" << neuron << "();" << endl;
      sourcefile << "     default:" << endl
                 << "         return 0.;" << endl << "   }"
                 << endl;
      sourcefile << "}" << endl << endl;
      headerfile << "private:" << endl;
      for (i = 0; i < fFirstLayer.GetEntriesFast(); i++)
         headerfile << "   double input" << i << ";" << endl;
      delete it;
      it = (TObjArrayIter *) fNetwork.MakeIterator();
      idx = 0;
      while ((neuron = (TNeuron *) it->Next())) {
         if (!neuron->GetPre(0)) {
            headerfile << "   double neuron" << neuron << "();" << endl;
            sourcefile << "double " << classname << "::neuron" << neuron
                       << "() {" << endl;
            sourcefile << "   return input" << idx++ << ";" << endl;
            sourcefile << "}" << endl << endl;
         } else {
            headerfile << "   double input" << neuron << "();" << endl;
            sourcefile << "double " << classname << "::input" << neuron
                       << "() {" << endl;
            sourcefile << "   double input = " << neuron->GetWeight()
                       << ";" << endl;
            TSynapse *syn = 0;
            Int_t n = 0;
            while ((syn = neuron->GetPre(n++))) {
               sourcefile << "   input += synapse" << syn << "();" << endl;
            }
            sourcefile << "   return input;" << endl;
            sourcefile << "}" << endl << endl;

            headerfile << "   double neuron" << neuron << "();" << endl;
            sourcefile << "double " << classname << "::neuron" << neuron << "() {" << endl;
            sourcefile << "   double input = input" << neuron << "();" << endl;
            switch(neuron->GetType()) {
               case (TNeuron::kSigmoid):
                  {
                     sourcefile << "   return ((input < -709. ? 0. : (1/(1+exp(-input)))) * ";
                     break;
                  }
               case (TNeuron::kLinear):
                  {
                     sourcefile << "   return (input * ";
                     break;
                  }
               case (TNeuron::kTanh):
                  {
                     sourcefile << "   return (tanh(input) * ";
                     break;
                  }
               case (TNeuron::kGauss):
                  {
                     sourcefile << "   return (exp(-input*input) * ";
                     break;
                  }
               case (TNeuron::kSoftmax):
                  {
                     sourcefile << "   return (exp(input) / (";
                     Int_t nn = 0;
                     TNeuron* side = neuron->GetInLayer(nn++);
                     sourcefile << "exp(input" << side << "())";
                     while ((side = neuron->GetInLayer(nn++)))
                        sourcefile << " + exp(input" << side << "())";
                     sourcefile << ") * ";
                     break;
                  }
               default:
                  {
                     sourcefile << "   return (0.0 * ";
                  }
            }
            sourcefile << neuron->GetNormalisation()[0] << ")+" ;
            sourcefile << neuron->GetNormalisation()[1] << ";" << endl;
            sourcefile << "}" << endl << endl;
         }
      }
      delete it;
      TSynapse *synapse = 0;
      it = (TObjArrayIter *) fSynapses.MakeIterator();
      while ((synapse = (TSynapse *) it->Next())) {
         headerfile << "   double synapse" << synapse << "();" << endl;
         sourcefile << "double " << classname << "::synapse"
                    << synapse << "() {" << endl;
         sourcefile << "   return (neuron" << synapse->GetPre()
                    << "()*" << synapse->GetWeight() << ");" << endl;
         sourcefile << "}" << endl << endl;
      }
      delete it;
      headerfile << "};" << endl << endl;
      headerfile << "#endif // " << basefilename << "_h" << endl << endl;
      headerfile.close();
      sourcefile.close();
      cout << header << " and " << source << " created." << endl;
   }
   else if(lg == "FORTRAN") {
      TString implicit = "      implicit double precision (a-h,n-z)\n";
      ofstream sigmoid("sigmoid.f");
      sigmoid << "      double precision FUNCTION SIGMOID(X)" << endl
              << implicit
              << "      IF(X.GT.37.) THEN" << endl
              << "         SIGMOID = 1." << endl
              << "      ELSE IF(X.LT.-709.) THEN" << endl
              << "         SIGMOID = 0." << endl
              << "      ELSE" << endl
              << "         SIGMOID = 1./(1.+EXP(-X))" << endl
              << "      ENDIF" << endl
              << "      END" << endl;
      sigmoid.close();
      TString source = filename;
      source += ".f";
      ofstream sourcefile(source);

      // the main function
      sourcefile << "      double precision function " << filename
                 << "(x, index)" << endl;
      sourcefile << implicit;
      sourcefile << "      double precision x(" <<
      fFirstLayer.GetEntriesFast() << ")" << endl << endl;

      // last layer
      sourcefile << "C --- Last Layer" << endl;
      TNeuron *neuron;
      TObjArrayIter *it = (TObjArrayIter *) fLastLayer.MakeIterator();
      Int_t idx = 0;
      TString ifelseif = "      if (index.eq.";
      while ((neuron = (TNeuron *) it->Next())) {
         sourcefile << ifelseif.Data() << idx++ << ") then" << endl
                    << "          " << filename
                    << "=neuron" << neuron << "(x);" << endl;
         ifelseif = "      else if (index.eq.";
      }
      sourcefile << "      else" << endl
                 << "          " << filename << "=0.d0" << endl
                 << "      endif" << endl;
      sourcefile << "      end" << endl;

      // first and hidden layers
      sourcefile << "C --- First and Hidden layers" << endl;
      delete it;
      it = (TObjArrayIter *) fNetwork.MakeIterator();
      idx = 0;
      while ((neuron = (TNeuron *) it->Next())) {
         sourcefile << "      double precision function neuron"
                    << neuron << "(x)" << endl
                    << implicit;
         sourcefile << "      double precision x("
                    << fFirstLayer.GetEntriesFast() << ")" << endl << endl;
         if (!neuron->GetPre(0)) {
            sourcefile << "      neuron" << neuron
                       << " = (x(" << idx+1 << ") - "
                       << ((TNeuron *) fFirstLayer[idx])->GetNormalisation()[1]
                       << "d0)/"
                       << ((TNeuron *) fFirstLayer[idx])->GetNormalisation()[0]
                       << "d0" << endl;
            idx++;
         } else {
            sourcefile << "      neuron" << neuron
                       << " = " << neuron->GetWeight() << "d0" << endl;
            TSynapse *syn;
            Int_t n = 0;
            while ((syn = neuron->GetPre(n++)))
               sourcefile << "      neuron" << neuron
                          << " = neuron" << neuron
                          << " + synapse" << syn << "(x)" << endl;
            switch(neuron->GetType()) {
               case (TNeuron::kSigmoid):
                  {
                     sourcefile << "      neuron" << neuron
                                << "= (sigmoid(neuron" << neuron << ")*";
                     break;
                  }
               case (TNeuron::kLinear):
                  {
                     break;
                  }
               case (TNeuron::kTanh):
                  {
                     sourcefile << "      neuron" << neuron
                                << "= (tanh(neuron" << neuron << ")*";
                     break;
                  }
               case (TNeuron::kGauss):
                  {
                     sourcefile << "      neuron" << neuron
                                << "= (exp(-neuron" << neuron << "*neuron"
                                << neuron << "))*";
                     break;
                  }
               case (TNeuron::kSoftmax):
                  {
                     Int_t nn = 0;
                     TNeuron* side = neuron->GetInLayer(nn++);
                     sourcefile << "      div = exp(neuron" << side << "())" << endl;
                     while ((side = neuron->GetInLayer(nn++)))
                        sourcefile << "      div = div + exp(neuron" << side << "())" << endl;
                     sourcefile << "      neuron" << neuron ;
                     sourcefile << "= (exp(neuron" << neuron << ") / div * ";
                     break;
                  }
               default:
                  {
                     sourcefile << "      neuron " << neuron << "= 0.";
                  }
            }
            sourcefile << neuron->GetNormalisation()[0] << "d0)+" ;
            sourcefile << neuron->GetNormalisation()[1] << "d0" << endl;
         }
         sourcefile << "      end" << endl;
      }
      delete it;

      // synapses
      sourcefile << "C --- Synapses" << endl;
      TSynapse *synapse = 0;
      it = (TObjArrayIter *) fSynapses.MakeIterator();
      while ((synapse = (TSynapse *) it->Next())) {
         sourcefile << "      double precision function " << "synapse"
                    << synapse << "(x)\n" << implicit;
         sourcefile << "      double precision x("
                    << fFirstLayer.GetEntriesFast() << ")" << endl << endl;
         sourcefile << "      synapse" << synapse
                    << "=neuron" << synapse->GetPre()
                    << "(x)*" << synapse->GetWeight() << "d0" << endl;
         sourcefile << "      end" << endl << endl;
      }
      delete it;
      sourcefile.close();
      cout << source << " created." << endl;
   }
   else if(lg == "PYTHON") {
      TString classname = filename;
      TString pyfile = filename;
      pyfile += ".py";
      ofstream pythonfile(pyfile);
      pythonfile << "from math import exp" << endl << endl;
      pythonfile << "from math import tanh" << endl << endl;
      pythonfile << "class " << classname << ":" << endl;
      pythonfile << "\tdef value(self,index";
      for (i = 0; i < fFirstLayer.GetEntriesFast(); i++) {
         pythonfile << ",in" << i;
      }
      pythonfile << "):" << endl;
      for (i = 0; i < fFirstLayer.GetEntriesFast(); i++)
         pythonfile << "\t\tself.input" << i << " = (in" << i << " - "
             << ((TNeuron *) fFirstLayer[i])->GetNormalisation()[1] << ")/"
             << ((TNeuron *) fFirstLayer[i])->GetNormalisation()[0] << endl;
      TNeuron *neuron;
      TObjArrayIter *it = (TObjArrayIter *) fLastLayer.MakeIterator();
      Int_t idx = 0;
      while ((neuron = (TNeuron *) it->Next()))
         pythonfile << "\t\tif index==" << idx++
                    << ": return self.neuron" << neuron << "();" << endl;
      pythonfile << "\t\treturn 0." << endl;
      delete it;
      it = (TObjArrayIter *) fNetwork.MakeIterator();
      idx = 0;
      while ((neuron = (TNeuron *) it->Next())) {
         pythonfile << "\tdef neuron" << neuron << "(self):" << endl;
         if (!neuron->GetPre(0))
            pythonfile << "\t\treturn self.input" << idx++ << endl;
         else {
            pythonfile << "\t\tinput = " << neuron->GetWeight() << endl;
            TSynapse *syn;
            Int_t n = 0;
            while ((syn = neuron->GetPre(n++)))
               pythonfile << "\t\tinput = input + self.synapse"
                          << syn << "()" << endl;
            switch(neuron->GetType()) {
               case (TNeuron::kSigmoid):
                  {
                     pythonfile << "\t\tif input<-709. : return " << neuron->GetNormalisation()[1] << endl;
                     pythonfile << "\t\treturn ((1/(1+exp(-input)))*";
                     break;
                  }
               case (TNeuron::kLinear):
                  {
                     pythonfile << "\t\treturn (input*";
                     break;
                  }
               case (TNeuron::kTanh):
                  {
                     pythonfile << "\t\treturn (tanh(input)*";
                     break;
                  }
               case (TNeuron::kGauss):
                  {
                     pythonfile << "\t\treturn (exp(-input*input)*";
                     break;
                  }
               case (TNeuron::kSoftmax):
                  {
                     pythonfile << "\t\treturn (exp(input) / (";
                     Int_t nn = 0;
                     TNeuron* side = neuron->GetInLayer(nn++);
                     pythonfile << "exp(self.neuron" << side << "())";
                     while ((side = neuron->GetInLayer(nn++)))
                        pythonfile << " + exp(self.neuron" << side << "())";
                     pythonfile << ") * ";
                     break;
                  }
               default:
                  {
                     pythonfile << "\t\treturn 0.";
                  }
            }
            pythonfile << neuron->GetNormalisation()[0] << ")+" ;
            pythonfile << neuron->GetNormalisation()[1] << endl;
         }
      }
      delete it;
      TSynapse *synapse = 0;
      it = (TObjArrayIter *) fSynapses.MakeIterator();
      while ((synapse = (TSynapse *) it->Next())) {
         pythonfile << "\tdef synapse" << synapse << "(self):" << endl;
         pythonfile << "\t\treturn (self.neuron" << synapse->GetPre()
                    << "()*" << synapse->GetWeight() << ")" << endl;
      }
      delete it;
      pythonfile.close();
      cout << pyfile << " created." << endl;
   }
}
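
// Export sketch (illustrative; the base name "MyNN" is arbitrary):
//
//   mlp.Export("MyNN", "C++");     // writes MyNN.h and MyNN.cxx
//   mlp.Export("mynn", "PYTHON");  // writes mynn.py
//   mlp.Export("mynn", "FORTRAN"); // writes mynn.f (and sigmoid.f)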

//______________________________________________________________________________
void TMultiLayerPerceptron::Shuffle(Int_t * index, Int_t n) const
{
   // Shuffle the Int_t index[n] in input.
   // Input:
   //   index: the array to shuffle
   //   n: the size of the array
   // Output:
   //   index: the shuffled indexes
   // This method is used for stochastic training.
   TTimeStamp ts;
   TRandom3 rnd(ts.GetSec());
   Int_t j, k;
   Int_t a = n - 1;
   for (Int_t i = 0; i < n; i++) {
      j = (Int_t) (rnd.Rndm() * a);
      k = index[j];
      index[j] = index[i];
      index[i] = k;
   }
   return;
}

//______________________________________________________________________________
void TMultiLayerPerceptron::MLP_Stochastic(Double_t * buffer)
{
   // One step for the stochastic method:
   // buffer should contain the previous direction
   // dw = -eta*(dE/dw + delta) + epsilon*dwprev
   Int_t nEvents = fTraining->GetN();
   Int_t *index = new Int_t[nEvents];
   Int_t i,j,nentries;
   for (i = 0; i < nEvents; i++)
      index[i] = i;
   fEta *= fEtaDecay;
   Shuffle(index, nEvents);
   TNeuron *neuron;
   TSynapse *synapse;
   for (i = 0; i < nEvents; i++) {
      GetEntry(fTraining->GetEntry(index[i]));
      // first compute DeDw for all neurons: force the calculation before
      // any modification of the weights
      nentries = fFirstLayer.GetEntriesFast();
      for (j=0;j<nentries;j++) {
         neuron = (TNeuron *) fFirstLayer.UncheckedAt(j);
         neuron->GetDeDw();
      }
      Int_t cnt = 0;
      // step for all neurons
      nentries = fNetwork.GetEntriesFast();
      for (j=0;j<nentries;j++) {
         neuron = (TNeuron *) fNetwork.UncheckedAt(j);
         buffer[cnt] = (-fEta) * (neuron->GetDeDw() + fDelta)
                       + fEpsilon * buffer[cnt];
         neuron->SetWeight(neuron->GetWeight() + buffer[cnt++]);
      }
      // step for all synapses
      nentries = fSynapses.GetEntriesFast();
      for (j=0;j<nentries;j++) {
         synapse = (TSynapse *) fSynapses.UncheckedAt(j);
         buffer[cnt] = (-fEta) * (synapse->GetDeDw() + fDelta)
                       + fEpsilon * buffer[cnt];
         synapse->SetWeight(synapse->GetWeight() + buffer[cnt++]);
      }
   }
   delete[]index;
}
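
// The update rule applied above, in formula form:
//    Delta w(t) = -eta * (dE/dw + delta) + epsilon * Delta w(t-1)
// where eta is the learning rate (multiplied by EtaDecay each epoch),
// delta a flat offset on the gradient and epsilon the momentum term.
// MLP_Batch below applies the same rule with the averaged gradient DEDw.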

//______________________________________________________________________________
void TMultiLayerPerceptron::MLP_Batch(Double_t * buffer)
{
   // One step for the batch method.
   // DEDw should have been computed before calling this.
   fEta *= fEtaDecay;
   Int_t cnt = 0;
   TObjArrayIter *it = (TObjArrayIter *) fNetwork.MakeIterator();
   TNeuron *neuron = 0;
   // step for all neurons
   while ((neuron = (TNeuron *) it->Next())) {
      buffer[cnt] = (-fEta) * (neuron->GetDEDw() + fDelta)
                    + fEpsilon * buffer[cnt];
      neuron->SetWeight(neuron->GetWeight() + buffer[cnt++]);
   }
   delete it;
   it = (TObjArrayIter *) fSynapses.MakeIterator();
   TSynapse *synapse = 0;
   // step for all synapses
   while ((synapse = (TSynapse *) it->Next())) {
      buffer[cnt] = (-fEta) * (synapse->GetDEDw() + fDelta)
                    + fEpsilon * buffer[cnt];
      synapse->SetWeight(synapse->GetWeight() + buffer[cnt++]);
   }
   delete it;
}

//______________________________________________________________________________
void TMultiLayerPerceptron::MLP_Line(Double_t * origin, Double_t * dir, Double_t dist)
{
   // Sets the weights to a point along a line:
   // weights are set to [origin + (dist * dir)].
   Int_t idx = 0;
   TNeuron *neuron = 0;
   TSynapse *synapse = 0;
   TObjArrayIter *it = (TObjArrayIter *) fNetwork.MakeIterator();
   while ((neuron = (TNeuron *) it->Next())) {
      neuron->SetWeight(origin[idx] + (dir[idx] * dist));
      idx++;
   }
   delete it;
   it = (TObjArrayIter *) fSynapses.MakeIterator();
   while ((synapse = (TSynapse *) it->Next())) {
      synapse->SetWeight(origin[idx] + (dir[idx] * dist));
      idx++;
   }
   delete it;
}

//______________________________________________________________________________
void TMultiLayerPerceptron::SteepestDir(Double_t * dir)
{
   // Sets the search direction to steepest descent: dir = -dE/dw.
   Int_t idx = 0;
   TNeuron *neuron = 0;
   TSynapse *synapse = 0;
   TObjArrayIter *it = (TObjArrayIter *) fNetwork.MakeIterator();
   while ((neuron = (TNeuron *) it->Next()))
      dir[idx++] = -neuron->GetDEDw();
   delete it;
   it = (TObjArrayIter *) fSynapses.MakeIterator();
   while ((synapse = (TSynapse *) it->Next()))
      dir[idx++] = -synapse->GetDEDw();
   delete it;
}
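
// A full steepest-descent step built from the helpers in this file
// (a sketch assuming DEDw is up to date; "els" is the total number of
// weights, i.e. fNetwork.GetEntriesFast() + fSynapses.GetEntriesFast()):
//
//    Double_t *dir = new Double_t[els];
//    SteepestDir(dir);                     // dir = -dE/dw
//    LineSearch(dir, buffer);              // move to the minimum along dir
//    delete [] dir;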

//______________________________________________________________________________
bool TMultiLayerPerceptron::LineSearch(Double_t * direction, Double_t * buffer)
{
   // Search along the line defined by direction.
   // buffer is not used as input but is updated with the new dw vector
   // at the end of the function.
   // It returns true if the line search fails.

   Int_t idx = 0;
   Int_t j,nentries;
   TNeuron *neuron = 0;
   TSynapse *synapse = 0;
   // store the weights before the line search
   Double_t *origin = new Double_t[fNetwork.GetEntriesFast() +
                                   fSynapses.GetEntriesFast()];
   nentries = fNetwork.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      neuron = (TNeuron *) fNetwork.UncheckedAt(j);
      origin[idx++] = neuron->GetWeight();
   }
   nentries = fSynapses.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      synapse = (TSynapse *) fSynapses.UncheckedAt(j);
      origin[idx++] = synapse->GetWeight();
   }
   // try to find a triplet (alpha1, alpha2, alpha3) such that
   // Error(alpha1) > Error(alpha2) < Error(alpha3)
   Double_t err1 = GetError(kTraining);
   Double_t alpha1 = 0.;
   Double_t alpha2 = fLastAlpha;
   if (alpha2 < 0.01)
      alpha2 = 0.01;
   if (alpha2 > 2.0)
      alpha2 = 2.0;
   Double_t alpha3 = alpha2;
   MLP_Line(origin, direction, alpha2);
   Double_t err2 = GetError(kTraining);
   Double_t err3 = err2;
   Bool_t bingo = false;
   Int_t icount;
   if (err1 > err2) {
      // the error decreases along direction: extend the bracket
      for (icount = 0; icount < 100; icount++) {
         alpha3 *= fTau;
         MLP_Line(origin, direction, alpha3);
         err3 = GetError(kTraining);
         if (err3 > err2) {
            bingo = true;
            break;
         }
         alpha1 = alpha2;
         err1 = err2;
         alpha2 = alpha3;
         err2 = err3;
      }
      if (!bingo) {
         // no bracketing triplet found: restore the original weights
         MLP_Line(origin, direction, 0.);
         delete [] origin;
         return true;
      }
   } else {
      // the error increases at alpha2: shrink the step until it decreases
      for (icount = 0; icount < 100; icount++) {
         alpha2 /= fTau;
         MLP_Line(origin, direction, alpha2);
         err2 = GetError(kTraining);
         if (err1 > err2) {
            bingo = true;
            break;
         }
         alpha3 = alpha2;
         err3 = err2;
      }
      if (!bingo) {
         // no decrease found: restore the original weights
         MLP_Line(origin, direction, 0.);
         delete [] origin;
         fLastAlpha = 0.05;
         return true;
      }
   }
   // move to the minimum of the parabola through the three points
   fLastAlpha = 0.5 * (alpha1 + alpha3 -
                (err3 - err1) / ((err3 - err2) / (alpha3 - alpha2)
                - (err2 - err1) / (alpha2 - alpha1)));
   fLastAlpha = fLastAlpha < 10000 ? fLastAlpha : 10000;
   MLP_Line(origin, direction, fLastAlpha);
   GetError(kTraining);
   // store the resulting weight change in buffer
   idx = 0;
   nentries = fNetwork.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      neuron = (TNeuron *) fNetwork.UncheckedAt(j);
      buffer[idx] = neuron->GetWeight() - origin[idx];
      idx++;
   }
   nentries = fSynapses.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      synapse = (TSynapse *) fSynapses.UncheckedAt(j);
      buffer[idx] = synapse->GetWeight() - origin[idx];
      idx++;
   }
   delete [] origin;
   return false;
}
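
// The final step length used above is the minimum of the parabola through
// the three bracketing points (alpha_i, err_i):
//
//    alpha_min = 0.5 * ( alpha1 + alpha3
//                - (err3 - err1) / ( (err3 - err2)/(alpha3 - alpha2)
//                                  - (err2 - err1)/(alpha2 - alpha1) ) )
//
// capped at 10000 to protect against a nearly flat error surface.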

//______________________________________________________________________________
void TMultiLayerPerceptron::ConjugateGradientsDir(Double_t * dir, Double_t beta)
{
   // Sets the search direction to the conjugate gradient direction
   //    dir = -dE/dw + beta * dir
   // where beta should be:
   //    ||g_(t+1)||^2 / ||g_(t)||^2                    (Fletcher-Reeves)
   //    g_(t+1) . (g_(t+1) - g_(t)) / ||g_(t)||^2      (Polak-Ribiere)

   Int_t idx = 0;
   Int_t j,nentries;
   TNeuron *neuron = 0;
   TSynapse *synapse = 0;
   nentries = fNetwork.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      neuron = (TNeuron *) fNetwork.UncheckedAt(j);
      dir[idx] = -neuron->GetDEDw() + beta * dir[idx];
      idx++;
   }
   nentries = fSynapses.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      synapse = (TSynapse *) fSynapses.UncheckedAt(j);
      dir[idx] = -synapse->GetDEDw() + beta * dir[idx];
      idx++;
   }
}
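
// Minimal standalone sketch of the Polak-Ribiere beta that a caller would
// feed into ConjugateGradientsDir, assuming g and gOld hold the current and
// previous gradient vectors of length els (all names local to this example):
//
//    Double_t num = 0., den = 0.;
//    for (Int_t i = 0; i < els; i++) {
//       num += g[i] * (g[i] - gOld[i]);
//       den += gOld[i] * gOld[i];
//    }
//    Double_t beta = (den != 0.) ? num / den : 0.;
//    ConjugateGradientsDir(dir, beta);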

//______________________________________________________________________________
bool TMultiLayerPerceptron::GetBFGSH(TMatrixD & bfgsh, TMatrixD & gamma, TMatrixD & delta)
{
   // Computes the BFGS update of the inverse hessian estimate (bfgsh)
   // from gamma (g_(t+1) - g_(t)) and delta (w_(t+1) - w_(t)).
   // It returns true if the update could not be performed
   // (if gamma and delta are orthogonal).

   TMatrixD gd(gamma, TMatrixD::kTransposeMult, delta);
   if ((Double_t) gd[0][0] == 0.)
      return true;
   TMatrixD aHg(bfgsh, TMatrixD::kMult, gamma);
   TMatrixD tmp(gamma, TMatrixD::kTransposeMult, bfgsh);
   TMatrixD gHg(gamma, TMatrixD::kTransposeMult, aHg);
   Double_t a = 1 / (Double_t) gd[0][0];
   Double_t f = 1 + ((Double_t) gHg[0][0] * a);
   TMatrixD res( TMatrixD(delta, TMatrixD::kMult,
                 TMatrixD(TMatrixD::kTransposed, delta)));
   res *= f;
   res -= (TMatrixD(delta, TMatrixD::kMult, tmp) +
           TMatrixD(aHg, TMatrixD::kMult,
                    TMatrixD(TMatrixD::kTransposed, delta)));
   res *= a;
   bfgsh += res;
   return false;
}
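
// The update applied above is the classic rank-2 BFGS formula for the
// inverse hessian H, with d = delta and g = gamma:
//
//    H' = H + (1 + (g^T H g)/(g^T d)) * (d d^T)/(g^T d)
//           - (d g^T H + H g d^T)/(g^T d)
//
// When g^T d == 0 the update is undefined, hence the early return above.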

//______________________________________________________________________________
void TMultiLayerPerceptron::SetGammaDelta(TMatrixD & gamma, TMatrixD & delta,
                                          Double_t * buffer)
{
   // Sets the gamma (g_(t+1) - g_(t)) and delta (w_(t+1) - w_(t)) vectors.
   // Gamma is computed here, so ComputeDEDw cannot have been called before,
   // and delta is a direct translation of buffer into a TMatrixD.

   Int_t els = fNetwork.GetEntriesFast() + fSynapses.GetEntriesFast();
   Int_t idx = 0;
   Int_t j,nentries;
   TNeuron *neuron = 0;
   TSynapse *synapse = 0;
   // start from -g_(t) ...
   nentries = fNetwork.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      neuron = (TNeuron *) fNetwork.UncheckedAt(j);
      gamma[idx++][0] = -neuron->GetDEDw();
   }
   nentries = fSynapses.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      synapse = (TSynapse *) fSynapses.UncheckedAt(j);
      gamma[idx++][0] = -synapse->GetDEDw();
   }
   for (Int_t i = 0; i < els; i++)
      delta[i] = buffer[i];
   // ... then add g_(t+1) once the new gradient is available
   ComputeDEDw();
   idx = 0;
   nentries = fNetwork.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      neuron = (TNeuron *) fNetwork.UncheckedAt(j);
      gamma[idx++][0] += neuron->GetDEDw();
   }
   nentries = fSynapses.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      synapse = (TSynapse *) fSynapses.UncheckedAt(j);
      gamma[idx++][0] += synapse->GetDEDw();
   }
}
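
// How the BFGS pieces fit together (a minimal sketch of one iteration;
// variable names follow the BFGS branch of Train(), bookkeeping omitted):
//
//    SetGammaDelta(gamma, delta, buffer);     // gradient and weight changes
//    if (GetBFGSH(bfgsh, gamma, delta))       // rank-2 update of H
//       bfgsh.UnitMatrix();                   // degenerate: restart from H = 1
//    BFGSDir(bfgsh, dir);                     // dir = -H * dE/dw
//    if (DerivDir(dir) > 0) SteepestDir(dir); // ensure a descent direction
//    LineSearch(dir, buffer);                 // step along dir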

//______________________________________________________________________________
Double_t TMultiLayerPerceptron::DerivDir(Double_t * dir)
{
   // Scalar product between the gradient and a direction:
   // the derivative of the error along that direction.

   Int_t idx = 0;
   Int_t j,nentries;
   Double_t output = 0;
   TNeuron *neuron = 0;
   TSynapse *synapse = 0;
   nentries = fNetwork.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      neuron = (TNeuron *) fNetwork.UncheckedAt(j);
      output += neuron->GetDEDw() * dir[idx++];
   }
   nentries = fSynapses.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      synapse = (TSynapse *) fSynapses.UncheckedAt(j);
      output += synapse->GetDEDw() * dir[idx++];
   }
   return output;
}
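
// In formula: DerivDir(dir) = sum_i (dE/dw_i) * dir_i = dE/dalpha along dir.
// A negative value means dir is a descent direction; the BFGS method uses
// this to decide whether to fall back to steepest descent.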

//______________________________________________________________________________
void TMultiLayerPerceptron::BFGSDir(TMatrixD & bfgsh, Double_t * dir)
{
   // Computes the direction for the BFGS algorithm as the product
   // between the inverse hessian estimate (bfgsh) and the gradient.

   Int_t els = fNetwork.GetEntriesFast() + fSynapses.GetEntriesFast();
   TMatrixD dedw(els, 1);
   Int_t idx = 0;
   Int_t j,nentries;
   TNeuron *neuron = 0;
   TSynapse *synapse = 0;
   nentries = fNetwork.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      neuron = (TNeuron *) fNetwork.UncheckedAt(j);
      dedw[idx++][0] = neuron->GetDEDw();
   }
   nentries = fSynapses.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      synapse = (TSynapse *) fSynapses.UncheckedAt(j);
      dedw[idx++][0] = synapse->GetDEDw();
   }
   TMatrixD direction(bfgsh, TMatrixD::kMult, dedw);
   for (Int_t i = 0; i < els; i++)
      dir[i] = -direction[i][0];
}
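
// i.e. dir = -H * dE/dw, the quasi-Newton step. With H equal to the unit
// matrix this reduces to SteepestDir().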

//______________________________________________________________________________
void TMultiLayerPerceptron::Draw(Option_t * )
{
   // Draws the network structure.
   // Neurons are depicted by a blue disk, and synapses by
   // lines connecting neurons.
   // The line width is proportional to the weight.

#define NeuronSize 2.5

   Int_t nLayers = fStructure.CountChar(':')+1;
   Float_t xStep = 1./(nLayers+1.);
   Int_t layer;
   // draw the synapses, layer by layer
   for(layer=0; layer< nLayers-1; layer++) {
      // count the neurons in this layer...
      Float_t nNeurons_this = 0;
      if(layer==0) {
         TString input = TString(fStructure(0, fStructure.First(':')));
         nNeurons_this = input.CountChar(',')+1;
      }
      else {
         Int_t cnt=0;
         TString hidden = TString(fStructure(fStructure.First(':') + 1,fStructure.Last(':') - fStructure.First(':') - 1));
         Int_t beg = 0;
         Int_t end = hidden.Index(":", beg + 1);
         while (end != -1) {
            Int_t num = atoi(TString(hidden(beg, end - beg)).Data());
            cnt++;
            beg = end + 1;
            end = hidden.Index(":", beg + 1);
            if(layer==cnt) nNeurons_this = num;
         }
         Int_t num = atoi(TString(hidden(beg, hidden.Length() - beg)).Data());
         cnt++;
         if(layer==cnt) nNeurons_this = num;
      }
      // ...and in the next one
      Float_t nNeurons_next = 0;
      if(layer==nLayers-2) {
         TString output = TString(fStructure(fStructure.Last(':') + 1,fStructure.Length() - fStructure.Last(':')));
         nNeurons_next = output.CountChar(',')+1;
      }
      else {
         Int_t cnt=0;
         TString hidden = TString(fStructure(fStructure.First(':') + 1,fStructure.Last(':') - fStructure.First(':') - 1));
         Int_t beg = 0;
         Int_t end = hidden.Index(":", beg + 1);
         while (end != -1) {
            Int_t num = atoi(TString(hidden(beg, end - beg)).Data());
            cnt++;
            beg = end + 1;
            end = hidden.Index(":", beg + 1);
            if(layer+1==cnt) nNeurons_next = num;
         }
         Int_t num = atoi(TString(hidden(beg, hidden.Length() - beg)).Data());
         cnt++;
         if(layer+1==cnt) nNeurons_next = num;
      }
      Float_t yStep_this = 1./(nNeurons_this+1.);
      Float_t yStep_next = 1./(nNeurons_next+1.);
      TObjArrayIter* it = (TObjArrayIter *) fSynapses.MakeIterator();
      TSynapse *theSynapse = 0;
      // use |weight| so that negative weights cannot yield a negative width
      Float_t maxWeight = 0;
      while ((theSynapse = (TSynapse *) it->Next()))
         maxWeight = maxWeight < TMath::Abs(theSynapse->GetWeight()) ?
                     TMath::Abs(theSynapse->GetWeight()) : maxWeight;
      delete it;
      it = (TObjArrayIter *) fSynapses.MakeIterator();
      for(Int_t neuron1=0; neuron1<nNeurons_this; neuron1++) {
         for(Int_t neuron2=0; neuron2<nNeurons_next; neuron2++) {
            TLine* synapse = new TLine(xStep*(layer+1),yStep_this*(neuron1+1),xStep*(layer+2),yStep_next*(neuron2+1));
            synapse->Draw();
            theSynapse = (TSynapse *) it->Next();
            if (!theSynapse) continue;
            synapse->SetLineWidth(Int_t((TMath::Abs(theSynapse->GetWeight())/maxWeight)*10.));
            synapse->SetLineStyle(1);
            if(((TMath::Abs(theSynapse->GetWeight())/maxWeight)*10.)<0.5) synapse->SetLineStyle(2);
            if(((TMath::Abs(theSynapse->GetWeight())/maxWeight)*10.)<0.25) synapse->SetLineStyle(3);
         }
      }
      delete it;
   }
   // draw the neurons
   for(layer=0; layer< nLayers; layer++) {
      Float_t nNeurons = 0;
      if(layer==0) {
         TString input = TString(fStructure(0, fStructure.First(':')));
         nNeurons = input.CountChar(',')+1;
      }
      else if(layer==nLayers-1) {
         TString output = TString(fStructure(fStructure.Last(':') + 1,fStructure.Length() - fStructure.Last(':')));
         nNeurons = output.CountChar(',')+1;
      }
      else {
         Int_t cnt=0;
         TString hidden = TString(fStructure(fStructure.First(':') + 1,fStructure.Last(':') - fStructure.First(':') - 1));
         Int_t beg = 0;
         Int_t end = hidden.Index(":", beg + 1);
         while (end != -1) {
            Int_t num = atoi(TString(hidden(beg, end - beg)).Data());
            cnt++;
            beg = end + 1;
            end = hidden.Index(":", beg + 1);
            if(layer==cnt) nNeurons = num;
         }
         Int_t num = atoi(TString(hidden(beg, hidden.Length() - beg)).Data());
         cnt++;
         if(layer==cnt) nNeurons = num;
      }
      Float_t yStep = 1./(nNeurons+1.);
      for(Int_t neuron=0; neuron<nNeurons; neuron++) {
         TMarker* m = new TMarker(xStep*(layer+1),yStep*(neuron+1),20);
         m->SetMarkerColor(4);
         m->SetMarkerSize(NeuronSize);
         m->Draw();
      }
   }
   // label the input neurons with the branch names
   const TString input = TString(fStructure(0, fStructure.First(':')));
   const TObjArray *inpL = input.Tokenize(" ,");
   const Int_t nrItems = inpL->GetLast()+1;
   Float_t yStep = 1./(nrItems+1);
   for (Int_t item = 0; item < nrItems; item++) {
      const TString brName = ((TObjString *)inpL->At(item))->GetString();
      TText* label = new TText(0.5*xStep,yStep*(item+1),brName.Data());
      label->Draw();
   }
   delete inpL;
   // label the output neurons
   Int_t numOutNodes=fLastLayer.GetEntriesFast();
   yStep=1./(numOutNodes+1);
   for (Int_t outnode=0; outnode<numOutNodes; outnode++) {
      TNeuron* neuron=(TNeuron*)fLastLayer[outnode];
      if (neuron && neuron->GetName()) {
         TText* label = new TText(xStep*nLayers,
                                  yStep*(outnode+1),
                                  neuron->GetName());
         label->Draw();
      }
   }
}
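
// Illustrative usage (assumes a TTree "tree" whose branches match the names
// in the layout string):
//
//    TMultiLayerPerceptron mlp("x,y:8:signal", tree);
//    TCanvas c("c", "network");
//    mlp.Draw();   // blue disks = neurons, lines = synapses;
//                  // line width and style reflect the relative |weight|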