TMVAClassificationApplication.cxx

/**********************************************************************************
 * Project   : TMVA - a Root-integrated toolkit for multivariate data analysis    *
 * Package   : TMVA                                                               *
 * Executable: TMVAClassificationApplication                                      *
 *                                                                                *
 * This macro provides a simple example of how to use the trained classifiers     *
 * within an analysis module                                                      *
 **********************************************************************************/
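
// How to build this as a standalone executable -- a minimal sketch, assuming a
// standard ROOT installation with TMVA enabled (exact flags may differ on your
// system):
//
//   g++ TMVAClassificationApplication.cxx $(root-config --cflags --libs) \
//       -lTMVA -o TMVAClassificationApplication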

#include <cstdlib>
#include <vector>
#include <iostream>
#include <map>
#include <string>

#include "TFile.h"
#include "TTree.h"
#include "TString.h"
#include "TSystem.h"
#include "TROOT.h"
#include "TStopwatch.h"
#include "TH1F.h"

#include "TMVA/Reader.h"
#include "TMVA/Config.h"
#include "TMVA/Tools.h"
#include "TMVA/MethodCuts.h"

int main( int argc, char** argv )
{
   //---------------------------------------------------------------
   // Default MVA methods to be trained + tested
   std::map<std::string,int> Use;

   // --- Cut optimisation
   Use["Cuts"]            = 1;
   Use["CutsD"]           = 1;
   Use["CutsPCA"]         = 0;
   Use["CutsGA"]          = 0;
   Use["CutsSA"]          = 0;
   //
   // --- 1-dimensional likelihood ("naive Bayes estimator")
   Use["Likelihood"]      = 1;
   Use["LikelihoodD"]     = 0; // the "D" extension indicates decorrelated input variables (see option strings)
   Use["LikelihoodPCA"]   = 1; // the "PCA" extension indicates PCA-transformed input variables (see option strings)
   Use["LikelihoodKDE"]   = 0;
   Use["LikelihoodMIX"]   = 0;
   //
   // --- Multidimensional likelihood and Nearest-Neighbour methods
   Use["PDERS"]           = 1;
   Use["PDERSD"]          = 0;
   Use["PDERSPCA"]        = 0;
   Use["PDEFoam"]         = 1;
   Use["PDEFoamBoost"]    = 0; // uses generalised MVA method boosting
   Use["KNN"]             = 1; // k-nearest neighbour method
   //
   // --- Linear Discriminant Analysis
   Use["LD"]              = 1; // Linear Discriminant identical to Fisher
   Use["Fisher"]          = 0;
   Use["FisherG"]         = 0;
   Use["BoostedFisher"]   = 0; // uses generalised MVA method boosting
   Use["HMatrix"]         = 0;
   //
   // --- Function Discriminant analysis
   Use["FDA_GA"]          = 1; // minimisation of user-defined function using a Genetic Algorithm
   Use["FDA_SA"]          = 0;
   Use["FDA_MC"]          = 0;
   Use["FDA_MT"]          = 0;
   Use["FDA_GAMT"]        = 0;
   Use["FDA_MCMT"]        = 0;
   //
   // --- Neural Networks (all are feed-forward Multilayer Perceptrons)
   Use["MLP"]             = 0; // Recommended ANN
   Use["MLPBFGS"]         = 0; // Recommended ANN with optional training method
   Use["MLPBNN"]          = 1; // Recommended ANN with BFGS training method and Bayesian regulator
   Use["CFMlpANN"]        = 0; // Deprecated ANN from ALEPH
   Use["TMlpANN"]         = 0; // ROOT's own ANN
   //
   // --- Support Vector Machine
   Use["SVM"]             = 1;
   //
   // --- Boosted Decision Trees
   Use["BDT"]             = 1; // uses Adaptive Boost
   Use["BDTG"]            = 0; // uses Gradient Boost
   Use["BDTB"]            = 0; // uses Bagging
   Use["BDTD"]            = 0; // decorrelation + Adaptive Boost
   //
   // --- Friedman's RuleFit method, i.e., an optimised series of cuts ("rules")
   Use["RuleFit"]         = 1;
   // ---------------------------------------------------------------

   std::map<std::string,int> nIdenticalResults;

   std::cout << std::endl;
   std::cout << "==> Start TMVAClassificationApplication" << std::endl;

   std::cout << "Running the following methods" << std::endl;
   if (argc>1) {
      for (std::map<std::string,int>::iterator it = Use.begin();
           it != Use.end(); it++) {
         it->second = 0;
         nIdenticalResults[it->first] = 0;
      }
   }
   for (int i=1; i<argc; i++) {
      std::string regMethod(argv[i]);
      if (Use.find(regMethod) == Use.end()) {
         std::cout << "Method " << regMethod << " not known in TMVA under this name. Please try one of:" << std::endl;
         for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) std::cout << it->first << " ";
         std::cout << std::endl;
         return 1;
      }
      Use[regMethod] = kTRUE;
   }
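
   // Methods can thus be selected on the command line; an illustrative
   // invocation (assuming the executable was built as sketched above):
   //
   //   ./TMVAClassificationApplication BDT MLPBNN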

   // --------------------------------------------------------------------------------------------------

   // --- Create the Reader object

   TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );

   // Create a set of variables and declare them to the reader
   // - the variables must correspond in name and type to
   //   those given in the weight file(s) that you use
   Float_t var1, var2;
   Float_t var3, var4;
   reader->AddVariable( "myvar1 := var1+var2", &var1 );
   reader->AddVariable( "myvar2 := var1-var2", &var2 );
   reader->AddVariable( "var3",                &var3 );
   reader->AddVariable( "var4",                &var4 );

   // Spectator variables declared in the training have to be added to the reader, too
   Float_t spec1,spec2;
   reader->AddSpectator( "spec1 := var1*2",   &spec1 );
   reader->AddSpectator( "spec2 := var1*3",   &spec2 );

   Float_t Category_cat1, Category_cat2, Category_cat3;
   if (Use["Category"]){
      // Add artificial spectators for distinguishing categories
      reader->AddSpectator( "Category_cat1 := var3<=0",             &Category_cat1 );
      reader->AddSpectator( "Category_cat2 := (var3>0)&&(var4<0)",  &Category_cat2 );
      reader->AddSpectator( "Category_cat3 := (var3>0)&&(var4>=0)", &Category_cat3 );
   }

   // --- Book the MVA methods

   TString dir    = "weights/";
   TString prefix = "TMVAClassification";

   // Book method(s)
   for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
      if (it->second) {
         TString methodName = TString(it->first) + TString(" method");
         TString weightfile = dir + prefix + "_" + TString(it->first) + TString(".weights.xml");
         reader->BookMVA( methodName, weightfile );
      }
   }
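
   // Note: the weight files read here (weights/TMVAClassification_<Method>.weights.xml)
   // are produced by the companion training tutorial, TMVAClassification;
   // run that first so a weight file exists for every method selected above.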

   // Book output histograms
   UInt_t nbin = 100;
   TH1F   *histLk(0), *histLkD(0), *histLkPCA(0), *histLkKDE(0), *histLkMIX(0), *histPD(0), *histPDD(0);
   TH1F   *histPDPCA(0), *histPDEFoam(0), *histPDEFoamErr(0), *histPDEFoamSig(0), *histKNN(0), *histHm(0);
   TH1F   *histFi(0), *histFiG(0), *histFiB(0), *histLD(0), *histNn(0),*histNnbfgs(0),*histNnbnn(0);
   TH1F   *histNnC(0), *histNnT(0), *histBdt(0), *histBdtG(0), *histBdtD(0), *histRf(0), *histSVMG(0);
   TH1F   *histFDAMT(0), *histFDAGA(0), *histCat(0), *histPBdt(0);

   if (Use["Likelihood"])    histLk      = new TH1F( "MVA_Likelihood",    "MVA_Likelihood",    nbin, -1, 1 );
   if (Use["LikelihoodD"])   histLkD     = new TH1F( "MVA_LikelihoodD",   "MVA_LikelihoodD",   nbin, -1, 0.9999 );
   if (Use["LikelihoodPCA"]) histLkPCA   = new TH1F( "MVA_LikelihoodPCA", "MVA_LikelihoodPCA", nbin, -1, 1 );
   if (Use["LikelihoodKDE"]) histLkKDE   = new TH1F( "MVA_LikelihoodKDE", "MVA_LikelihoodKDE", nbin,  -0.00001, 0.99999 );
   if (Use["LikelihoodMIX"]) histLkMIX   = new TH1F( "MVA_LikelihoodMIX", "MVA_LikelihoodMIX", nbin,  0, 1 );
   if (Use["PDERS"])         histPD      = new TH1F( "MVA_PDERS",         "MVA_PDERS",         nbin,  0, 1 );
   if (Use["PDERSD"])        histPDD     = new TH1F( "MVA_PDERSD",        "MVA_PDERSD",        nbin,  0, 1 );
   if (Use["PDERSPCA"])      histPDPCA   = new TH1F( "MVA_PDERSPCA",      "MVA_PDERSPCA",      nbin,  0, 1 );
   if (Use["KNN"])           histKNN     = new TH1F( "MVA_KNN",           "MVA_KNN",           nbin,  0, 1 );
   if (Use["HMatrix"])       histHm      = new TH1F( "MVA_HMatrix",       "MVA_HMatrix",       nbin, -0.95, 1.55 );
   if (Use["Fisher"])        histFi      = new TH1F( "MVA_Fisher",        "MVA_Fisher",        nbin, -4, 4 );
   if (Use["FisherG"])       histFiG     = new TH1F( "MVA_FisherG",       "MVA_FisherG",       nbin, -1, 1 );
   if (Use["BoostedFisher"]) histFiB     = new TH1F( "MVA_BoostedFisher", "MVA_BoostedFisher", nbin, -2, 2 );
   if (Use["LD"])            histLD      = new TH1F( "MVA_LD",            "MVA_LD",            nbin, -2, 2 );
   if (Use["MLP"])           histNn      = new TH1F( "MVA_MLP",           "MVA_MLP",           nbin, -1.25, 1.5 );
   if (Use["MLPBFGS"])       histNnbfgs  = new TH1F( "MVA_MLPBFGS",       "MVA_MLPBFGS",       nbin, -1.25, 1.5 );
   if (Use["MLPBNN"])        histNnbnn   = new TH1F( "MVA_MLPBNN",        "MVA_MLPBNN",        nbin, -1.25, 1.5 );
   if (Use["CFMlpANN"])      histNnC     = new TH1F( "MVA_CFMlpANN",      "MVA_CFMlpANN",      nbin,  0, 1 );
   if (Use["TMlpANN"])       histNnT     = new TH1F( "MVA_TMlpANN",       "MVA_TMlpANN",       nbin, -1.3, 1.3 );
   if (Use["BDT"])           histBdt     = new TH1F( "MVA_BDT",           "MVA_BDT",           nbin, -0.8, 0.8 );
   if (Use["BDTD"])          histBdtD    = new TH1F( "MVA_BDTD",          "MVA_BDTD",          nbin, -0.8, 0.8 );
   if (Use["BDTG"])          histBdtG    = new TH1F( "MVA_BDTG",          "MVA_BDTG",          nbin, -1.0, 1.0 );
   if (Use["RuleFit"])       histRf      = new TH1F( "MVA_RuleFit",       "MVA_RuleFit",       nbin, -2.0, 2.0 );
   if (Use["SVM"])           histSVMG    = new TH1F( "MVA_SVM",           "MVA_SVM",           nbin,  0.0, 1.0 );
   if (Use["FDA_MT"])        histFDAMT   = new TH1F( "MVA_FDA_MT",        "MVA_FDA_MT",        nbin, -2.0, 3.0 );
   if (Use["FDA_GA"])        histFDAGA   = new TH1F( "MVA_FDA_GA",        "MVA_FDA_GA",        nbin, -2.0, 3.0 );
   if (Use["Category"])      histCat     = new TH1F( "MVA_Category",      "MVA_Category",      nbin, -2., 2. );
   if (Use["Plugin"])        histPBdt    = new TH1F( "MVA_PBDT",          "MVA_PBDT",          nbin, -0.8, 0.8 );

   // PDEFoam also returns a per-event error; fill it into a histogram, and also fill the significance
   if (Use["PDEFoam"]) {
      histPDEFoam    = new TH1F( "MVA_PDEFoam",       "MVA_PDEFoam",              nbin,  0, 1 );
      histPDEFoamErr = new TH1F( "MVA_PDEFoamErr",    "MVA_PDEFoam error",        nbin,  0, 1 );
      histPDEFoamSig = new TH1F( "MVA_PDEFoamSig",    "MVA_PDEFoam significance", nbin,  0, 10 );
   }

   // Book example histograms for probability and Rarity (the other methods are done similarly)
   TH1F *probHistFi(0), *rarityHistFi(0);
   if (Use["Fisher"]) {
      probHistFi   = new TH1F( "MVA_Fisher_Proba",  "MVA_Fisher_Proba",  nbin, 0, 1 );
      rarityHistFi = new TH1F( "MVA_Fisher_Rarity", "MVA_Fisher_Rarity", nbin, 0, 1 );
   }

   // Prepare the input tree (this must be replaced by your data source)
   // In this example there is a toy tree with signal and one with background events;
   // we will later use only the "signal" events for the test.
   //
   TFile *input(0);
   TString fname = "./tmva_example.root";
   if (!gSystem->AccessPathName( fname ))
      input = TFile::Open( fname ); // check if file in local directory exists
   else
      input = TFile::Open( "http://root.cern.ch/files/tmva_class_example.root" ); // if not: download from ROOT server

   if (!input) {
      std::cout << "ERROR: could not open data file" << std::endl;
      exit(1);
   }
   std::cout << "--- TMVAClassificationApp    : Using input file: " << input->GetName() << std::endl;

   // --- Event loop

   // Prepare the event tree
   // - here the variable names have to correspond to those in your tree
   // - you can use the same variables as above, which is slightly faster,
   //   but of course you can use different ones and copy the values inside the event loop
   //
   std::cout << "--- Select signal sample" << std::endl;
   TTree* theTree = (TTree*)input->Get("TreeS");
   Float_t userVar1, userVar2;
   theTree->SetBranchAddress( "var1", &userVar1 );
   theTree->SetBranchAddress( "var2", &userVar2 );
   theTree->SetBranchAddress( "var3", &var3 );
   theTree->SetBranchAddress( "var4", &var4 );

   // Efficiency calculator for cut method
   Int_t    nSelCutsGA = 0;
   Double_t effS       = 0.7;

   std::vector<Float_t> vecVar(4); // vector for EvaluateMVA tests
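
   // For illustration -- a minimal sketch of the vector-based call that vecVar
   // is intended for: TMVA::Reader::EvaluateMVA can also take the input values
   // directly, instead of reading them through the bound variable pointers, e.g.
   //
   //   vecVar[0] = var1; vecVar[1] = var2; vecVar[2] = var3; vecVar[3] = var4;
   //   Double_t mvaValue = reader->EvaluateMVA( vecVar, "BDT method" );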

   std::cout << "--- Processing: " << theTree->GetEntries() << " events" << std::endl;
   TStopwatch sw;
   sw.Start();
   Long64_t nEvent = theTree->GetEntries();
   for (Long64_t ievt=0; ievt<nEvent; ievt++) {

      if (ievt%1000 == 0) std::cout << "--- ... Processing event: " << ievt << std::endl;

      theTree->GetEntry(ievt);

      var1 = userVar1 + userVar2;
      var2 = userVar1 - userVar2;

      // --- Return the MVA outputs and fill them into histograms

      if (Use["CutsGA"]) {
         // Cuts is a special case: give the desired signal efficiency
         Bool_t passed = reader->EvaluateMVA( "CutsGA method", effS );
         if (passed) nSelCutsGA++;
      }

      if (Use["Likelihood"   ])   histLk     ->Fill( reader->EvaluateMVA( "Likelihood method"    ) );
      if (Use["LikelihoodD"  ])   histLkD    ->Fill( reader->EvaluateMVA( "LikelihoodD method"   ) );
      if (Use["LikelihoodPCA"])   histLkPCA  ->Fill( reader->EvaluateMVA( "LikelihoodPCA method" ) );
      if (Use["LikelihoodKDE"])   histLkKDE  ->Fill( reader->EvaluateMVA( "LikelihoodKDE method" ) );
      if (Use["LikelihoodMIX"])   histLkMIX  ->Fill( reader->EvaluateMVA( "LikelihoodMIX method" ) );
      if (Use["PDERS"        ])   histPD     ->Fill( reader->EvaluateMVA( "PDERS method"         ) );
      if (Use["PDERSD"       ])   histPDD    ->Fill( reader->EvaluateMVA( "PDERSD method"        ) );
      if (Use["PDERSPCA"     ])   histPDPCA  ->Fill( reader->EvaluateMVA( "PDERSPCA method"      ) );
      if (Use["KNN"          ])   histKNN    ->Fill( reader->EvaluateMVA( "KNN method"           ) );
      if (Use["HMatrix"      ])   histHm     ->Fill( reader->EvaluateMVA( "HMatrix method"       ) );
      if (Use["Fisher"       ])   histFi     ->Fill( reader->EvaluateMVA( "Fisher method"        ) );
      if (Use["FisherG"      ])   histFiG    ->Fill( reader->EvaluateMVA( "FisherG method"       ) );
      if (Use["BoostedFisher"])   histFiB    ->Fill( reader->EvaluateMVA( "BoostedFisher method" ) );
      if (Use["LD"           ])   histLD     ->Fill( reader->EvaluateMVA( "LD method"            ) );
      if (Use["MLP"          ])   histNn     ->Fill( reader->EvaluateMVA( "MLP method"           ) );
      if (Use["MLPBFGS"      ])   histNnbfgs ->Fill( reader->EvaluateMVA( "MLPBFGS method"       ) );
      if (Use["MLPBNN"       ])   histNnbnn  ->Fill( reader->EvaluateMVA( "MLPBNN method"        ) );
      if (Use["CFMlpANN"     ])   histNnC    ->Fill( reader->EvaluateMVA( "CFMlpANN method"      ) );
      if (Use["TMlpANN"      ])   histNnT    ->Fill( reader->EvaluateMVA( "TMlpANN method"       ) );
      if (Use["BDT"          ])   histBdt    ->Fill( reader->EvaluateMVA( "BDT method"           ) );
      if (Use["BDTD"         ])   histBdtD   ->Fill( reader->EvaluateMVA( "BDTD method"          ) );
      if (Use["BDTG"         ])   histBdtG   ->Fill( reader->EvaluateMVA( "BDTG method"          ) );
      if (Use["RuleFit"      ])   histRf     ->Fill( reader->EvaluateMVA( "RuleFit method"       ) );
      if (Use["SVM"          ])   histSVMG   ->Fill( reader->EvaluateMVA( "SVM method"           ) );
      if (Use["FDA_MT"       ])   histFDAMT  ->Fill( reader->EvaluateMVA( "FDA_MT method"        ) );
      if (Use["FDA_GA"       ])   histFDAGA  ->Fill( reader->EvaluateMVA( "FDA_GA method"        ) );
      if (Use["Category"     ])   histCat    ->Fill( reader->EvaluateMVA( "Category method"      ) );
      if (Use["Plugin"       ])   histPBdt   ->Fill( reader->EvaluateMVA( "P_BDT method"         ) );

      // Also retrieve the per-event error
      if (Use["PDEFoam"]) {
         Double_t val = reader->EvaluateMVA( "PDEFoam method" );
         Double_t err = reader->GetMVAError();
         histPDEFoam   ->Fill( val );
         histPDEFoamErr->Fill( err );
         if (err>1.e-50) histPDEFoamSig->Fill( val/err );
      }

      // Retrieve the signal probability and the Rarity instead of the raw MVA output
      if (Use["Fisher"])   {
         probHistFi  ->Fill( reader->GetProba ( "Fisher method" ) );
         rarityHistFi->Fill( reader->GetRarity( "Fisher method" ) );
      }
   }

   // Get elapsed time
   sw.Stop();
   std::cout << "--- End of event loop: "; sw.Print();

   // Get efficiency for cuts classifier
   if (Use["CutsGA"]) std::cout << "--- Efficiency for CutsGA method: " << double(nSelCutsGA)/theTree->GetEntries()
                                << " (for a required signal efficiency of " << effS << ")" << std::endl;

   if (Use["CutsGA"]) {

      // test: retrieve cuts for a particular signal efficiency
      // CINT ignores dynamic_casts, so we have to use a cuts-specific Reader function to access the pointer
      TMVA::MethodCuts* mcuts = reader->FindCutsMVA( "CutsGA method" ) ;

      if (mcuts) {
         std::vector<Double_t> cutsMin;
         std::vector<Double_t> cutsMax;
         mcuts->GetCuts( 0.7, cutsMin, cutsMax );
         std::cout << "--- -------------------------------------------------------------" << std::endl;
         std::cout << "--- Retrieve cut values for signal efficiency of 0.7 from Reader" << std::endl;
         for (UInt_t ivar=0; ivar<cutsMin.size(); ivar++) {
            std::cout << "... Cut: "
                      << cutsMin[ivar]
                      << " < \""
                      << mcuts->GetInputVar(ivar)
                      << "\" <= "
                      << cutsMax[ivar] << std::endl;
         }
         std::cout << "--- -------------------------------------------------------------" << std::endl;
      }
   }
   // --- Write histograms

   TFile *target  = new TFile( "TMVApp.root","RECREATE" );
   if (Use["Likelihood"   ])   histLk     ->Write();
   if (Use["LikelihoodD"  ])   histLkD    ->Write();
   if (Use["LikelihoodPCA"])   histLkPCA  ->Write();
   if (Use["LikelihoodKDE"])   histLkKDE  ->Write();
   if (Use["LikelihoodMIX"])   histLkMIX  ->Write();
   if (Use["PDERS"        ])   histPD     ->Write();
   if (Use["PDERSD"       ])   histPDD    ->Write();
   if (Use["PDERSPCA"     ])   histPDPCA  ->Write();
   if (Use["KNN"          ])   histKNN    ->Write();
   if (Use["HMatrix"      ])   histHm     ->Write();
   if (Use["Fisher"       ])   histFi     ->Write();
   if (Use["FisherG"      ])   histFiG    ->Write();
   if (Use["BoostedFisher"])   histFiB    ->Write();
   if (Use["LD"           ])   histLD     ->Write();
   if (Use["MLP"          ])   histNn     ->Write();
   if (Use["MLPBFGS"      ])   histNnbfgs ->Write();
   if (Use["MLPBNN"       ])   histNnbnn  ->Write();
   if (Use["CFMlpANN"     ])   histNnC    ->Write();
   if (Use["TMlpANN"      ])   histNnT    ->Write();
   if (Use["BDT"          ])   histBdt    ->Write();
   if (Use["BDTD"         ])   histBdtD   ->Write();
   if (Use["BDTG"         ])   histBdtG   ->Write();
   if (Use["RuleFit"      ])   histRf     ->Write();
   if (Use["SVM"          ])   histSVMG   ->Write();
   if (Use["FDA_MT"       ])   histFDAMT  ->Write();
   if (Use["FDA_GA"       ])   histFDAGA  ->Write();
   if (Use["Category"     ])   histCat    ->Write();
   if (Use["Plugin"       ])   histPBdt   ->Write();

   // Also write the error and significance histograms
   if (Use["PDEFoam"]) { histPDEFoam->Write(); histPDEFoamErr->Write(); histPDEFoamSig->Write(); }

   // Also write the probability histograms
   if (Use["Fisher"]) { if (probHistFi != 0) probHistFi->Write(); if (rarityHistFi != 0) rarityHistFi->Write(); }
   target->Close();
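
   // To inspect the output interactively -- an illustrative ROOT session:
   //
   //   root -l TMVApp.root
   //   root [1] MVA_BDT->Draw()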

   std::cout << "--- Created root file: \"TMVApp.root\" containing the MVA output histograms" << std::endl;

   delete reader;

   std::cout << "==> TMVAClassificationApplication is done!" << std::endl << std::endl;
}
