TMVAClassificationApplication.C

Go to the documentation of this file.
00001 /**********************************************************************************
00002  * Project   : TMVA - a Root-integrated toolkit for multivariate data analysis    *
00003  * Package   : TMVA                                                               *
00004  * Executable: TMVAClassificationApplication                                      *
00005  *                                                                                *
00006  * This macro provides a simple example on how to use the trained classifiers     *
00007  * within an analysis module                                                      *
00008  **********************************************************************************/
00009 
00010 #include <cstdlib>
00011 #include <vector>
00012 #include <iostream>
00013 #include <map>
00014 #include <string>
00015 
00016 #include "TFile.h"
00017 #include "TTree.h"
00018 #include "TString.h"
00019 #include "TSystem.h"
00020 #include "TROOT.h"
00021 #include "TStopwatch.h"
00022 
00023 #include "TMVAGui.C"
00024 
00025 #if not defined(__CINT__) || defined(__MAKECINT__)
00026 #include "TMVA/Tools.h"
00027 #include "TMVA/Reader.h"
00028 #include "TMVA/MethodCuts.h"
00029 #endif
00030 
00031 using namespace TMVA;
00032 
00033 void TMVAClassificationApplication( TString myMethodList = "" ) 
00034 {   
00035 #ifdef __CINT__
00036    gROOT->ProcessLine( ".O0" ); // turn off optimization in CINT
00037 #endif
00038 
00039    //---------------------------------------------------------------
00040 
00041    // This loads the library
00042    TMVA::Tools::Instance();
00043 
00044    // Default MVA methods to be trained + tested
00045    std::map<std::string,int> Use;
00046 
00047    // --- Cut optimisation
00048    Use["Cuts"]            = 1;
00049    Use["CutsD"]           = 1;
00050    Use["CutsPCA"]         = 0;
00051    Use["CutsGA"]          = 0;
00052    Use["CutsSA"]          = 0;
00053    // 
00054    // --- 1-dimensional likelihood ("naive Bayes estimator")
00055    Use["Likelihood"]      = 1;
00056    Use["LikelihoodD"]     = 0; // the "D" extension indicates decorrelated input variables (see option strings)
00057    Use["LikelihoodPCA"]   = 1; // the "PCA" extension indicates PCA-transformed input variables (see option strings)
00058    Use["LikelihoodKDE"]   = 0;
00059    Use["LikelihoodMIX"]   = 0;
00060    //
00061    // --- Mutidimensional likelihood and Nearest-Neighbour methods
00062    Use["PDERS"]           = 1;
00063    Use["PDERSD"]          = 0;
00064    Use["PDERSPCA"]        = 0;
00065    Use["PDEFoam"]         = 1;
00066    Use["PDEFoamBoost"]    = 0; // uses generalised MVA method boosting
00067    Use["KNN"]             = 1; // k-nearest neighbour method
00068    //
00069    // --- Linear Discriminant Analysis
00070    Use["LD"]              = 1; // Linear Discriminant identical to Fisher
00071    Use["Fisher"]          = 0;
00072    Use["FisherG"]         = 0;
00073    Use["BoostedFisher"]   = 0; // uses generalised MVA method boosting
00074    Use["HMatrix"]         = 0;
00075    //
00076    // --- Function Discriminant analysis
00077    Use["FDA_GA"]          = 1; // minimisation of user-defined function using Genetics Algorithm
00078    Use["FDA_SA"]          = 0;
00079    Use["FDA_MC"]          = 0;
00080    Use["FDA_MT"]          = 0;
00081    Use["FDA_GAMT"]        = 0;
00082    Use["FDA_MCMT"]        = 0;
00083    //
00084    // --- Neural Networks (all are feed-forward Multilayer Perceptrons)
00085    Use["MLP"]             = 0; // Recommended ANN
00086    Use["MLPBFGS"]         = 0; // Recommended ANN with optional training method
00087    Use["MLPBNN"]          = 1; // Recommended ANN with BFGS training method and bayesian regulator
00088    Use["CFMlpANN"]        = 0; // Depreciated ANN from ALEPH
00089    Use["TMlpANN"]         = 0; // ROOT's own ANN
00090    //
00091    // --- Support Vector Machine 
00092    Use["SVM"]             = 1;
00093    // 
00094    // --- Boosted Decision Trees
00095    Use["BDT"]             = 1; // uses Adaptive Boost
00096    Use["BDTG"]            = 0; // uses Gradient Boost
00097    Use["BDTB"]            = 0; // uses Bagging
00098    Use["BDTD"]            = 0; // decorrelation + Adaptive Boost
00099    // 
00100    // --- Friedman's RuleFit method, ie, an optimised series of cuts ("rules")
00101    Use["RuleFit"]         = 1;
00102    // ---------------------------------------------------------------
00103    Use["Plugin"]          = 0;
00104    Use["Category"]        = 0;
00105    Use["SVM_Gauss"]       = 0;
00106    Use["SVM_Poly"]        = 0;
00107    Use["SVM_Lin"]         = 0;
00108 
00109    std::cout << std::endl;
00110    std::cout << "==> Start TMVAClassificationApplication" << std::endl;
00111 
00112    // Select methods (don't look at this code - not of interest)
00113    if (myMethodList != "") {
00114       for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;
00115 
00116       std::vector<TString> mlist = gTools().SplitString( myMethodList, ',' );
00117       for (UInt_t i=0; i<mlist.size(); i++) {
00118          std::string regMethod(mlist[i]);
00119 
00120          if (Use.find(regMethod) == Use.end()) {
00121             std::cout << "Method \"" << regMethod 
00122                       << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
00123             for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
00124                std::cout << it->first << " ";
00125             }
00126             std::cout << std::endl;
00127             return;
00128          }
00129          Use[regMethod] = 1;
00130       }
00131    }
00132 
00133    // --------------------------------------------------------------------------------------------------
00134 
00135    // --- Create the Reader object
00136 
00137    TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );    
00138 
00139    // Create a set of variables and declare them to the reader
00140    // - the variable names MUST corresponds in name and type to those given in the weight file(s) used
00141    Float_t var1, var2;
00142    Float_t var3, var4;
00143    reader->AddVariable( "myvar1 := var1+var2", &var1 );
00144    reader->AddVariable( "myvar2 := var1-var2", &var2 );
00145    reader->AddVariable( "var3",                &var3 );
00146    reader->AddVariable( "var4",                &var4 );
00147 
00148    // Spectator variables declared in the training have to be added to the reader, too
00149    Float_t spec1,spec2;
00150    reader->AddSpectator( "spec1 := var1*2",   &spec1 );
00151    reader->AddSpectator( "spec2 := var1*3",   &spec2 );
00152 
00153    Float_t Category_cat1, Category_cat2, Category_cat3;
00154    if (Use["Category"]){
00155       // Add artificial spectators for distinguishing categories
00156       reader->AddSpectator( "Category_cat1 := var3<=0",             &Category_cat1 );
00157       reader->AddSpectator( "Category_cat2 := (var3>0)&&(var4<0)",  &Category_cat2 );
00158       reader->AddSpectator( "Category_cat3 := (var3>0)&&(var4>=0)", &Category_cat3 );
00159    }
00160 
00161    // --- Book the MVA methods
00162 
00163    TString dir    = "weights/";
00164    TString prefix = "TMVAClassification";
00165 
00166    // Book method(s)
00167    for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
00168       if (it->second) {
00169          TString methodName = TString(it->first) + TString(" method");
00170          TString weightfile = dir + prefix + TString("_") + TString(it->first) + TString(".weights.xml");
00171          reader->BookMVA( methodName, weightfile ); 
00172       }
00173    }
00174    
00175    // Book output histograms
00176    UInt_t nbin = 100;
00177    TH1F   *histLk(0), *histLkD(0), *histLkPCA(0), *histLkKDE(0), *histLkMIX(0), *histPD(0), *histPDD(0);
00178    TH1F   *histPDPCA(0), *histPDEFoam(0), *histPDEFoamErr(0), *histPDEFoamSig(0), *histKNN(0), *histHm(0);
00179    TH1F   *histFi(0), *histFiG(0), *histFiB(0), *histLD(0), *histNn(0),*histNnbfgs(0),*histNnbnn(0);
00180    TH1F   *histNnC(0), *histNnT(0), *histBdt(0), *histBdtG(0), *histBdtD(0), *histRf(0), *histSVMG(0);
00181    TH1F   *histSVMP(0), *histSVML(0), *histFDAMT(0), *histFDAGA(0), *histCat(0), *histPBdt(0);
00182 
00183    if (Use["Likelihood"])    histLk      = new TH1F( "MVA_Likelihood",    "MVA_Likelihood",    nbin, -1, 1 );
00184    if (Use["LikelihoodD"])   histLkD     = new TH1F( "MVA_LikelihoodD",   "MVA_LikelihoodD",   nbin, -1, 0.9999 );
00185    if (Use["LikelihoodPCA"]) histLkPCA   = new TH1F( "MVA_LikelihoodPCA", "MVA_LikelihoodPCA", nbin, -1, 1 );
00186    if (Use["LikelihoodKDE"]) histLkKDE   = new TH1F( "MVA_LikelihoodKDE", "MVA_LikelihoodKDE", nbin,  -0.00001, 0.99999 );
00187    if (Use["LikelihoodMIX"]) histLkMIX   = new TH1F( "MVA_LikelihoodMIX", "MVA_LikelihoodMIX", nbin,  0, 1 );
00188    if (Use["PDERS"])         histPD      = new TH1F( "MVA_PDERS",         "MVA_PDERS",         nbin,  0, 1 );
00189    if (Use["PDERSD"])        histPDD     = new TH1F( "MVA_PDERSD",        "MVA_PDERSD",        nbin,  0, 1 );
00190    if (Use["PDERSPCA"])      histPDPCA   = new TH1F( "MVA_PDERSPCA",      "MVA_PDERSPCA",      nbin,  0, 1 );
00191    if (Use["KNN"])           histKNN     = new TH1F( "MVA_KNN",           "MVA_KNN",           nbin,  0, 1 );
00192    if (Use["HMatrix"])       histHm      = new TH1F( "MVA_HMatrix",       "MVA_HMatrix",       nbin, -0.95, 1.55 );
00193    if (Use["Fisher"])        histFi      = new TH1F( "MVA_Fisher",        "MVA_Fisher",        nbin, -4, 4 );
00194    if (Use["FisherG"])       histFiG     = new TH1F( "MVA_FisherG",       "MVA_FisherG",       nbin, -1, 1 );
00195    if (Use["BoostedFisher"]) histFiB     = new TH1F( "MVA_BoostedFisher", "MVA_BoostedFisher", nbin, -2, 2 );
00196    if (Use["LD"])            histLD      = new TH1F( "MVA_LD",            "MVA_LD",            nbin, -2, 2 );
00197    if (Use["MLP"])           histNn      = new TH1F( "MVA_MLP",           "MVA_MLP",           nbin, -1.25, 1.5 );
00198    if (Use["MLPBFGS"])       histNnbfgs  = new TH1F( "MVA_MLPBFGS",       "MVA_MLPBFGS",       nbin, -1.25, 1.5 );
00199    if (Use["MLPBNN"])        histNnbnn   = new TH1F( "MVA_MLPBNN",        "MVA_MLPBNN",        nbin, -1.25, 1.5 );
00200    if (Use["CFMlpANN"])      histNnC     = new TH1F( "MVA_CFMlpANN",      "MVA_CFMlpANN",      nbin,  0, 1 );
00201    if (Use["TMlpANN"])       histNnT     = new TH1F( "MVA_TMlpANN",       "MVA_TMlpANN",       nbin, -1.3, 1.3 );
00202    if (Use["BDT"])           histBdt     = new TH1F( "MVA_BDT",           "MVA_BDT",           nbin, -0.8, 0.8 );
00203    if (Use["BDTD"])          histBdtD    = new TH1F( "MVA_BDTD",          "MVA_BDTD",          nbin, -0.8, 0.8 );
00204    if (Use["BDTG"])          histBdtG    = new TH1F( "MVA_BDTG",          "MVA_BDTG",          nbin, -1.0, 1.0 );
00205    if (Use["RuleFit"])       histRf      = new TH1F( "MVA_RuleFit",       "MVA_RuleFit",       nbin, -2.0, 2.0 );
00206    if (Use["SVM_Gauss"])     histSVMG    = new TH1F( "MVA_SVM_Gauss",     "MVA_SVM_Gauss",     nbin,  0.0, 1.0 );
00207    if (Use["SVM_Poly"])      histSVMP    = new TH1F( "MVA_SVM_Poly",      "MVA_SVM_Poly",      nbin,  0.0, 1.0 );
00208    if (Use["SVM_Lin"])       histSVML    = new TH1F( "MVA_SVM_Lin",       "MVA_SVM_Lin",       nbin,  0.0, 1.0 );
00209    if (Use["FDA_MT"])        histFDAMT   = new TH1F( "MVA_FDA_MT",        "MVA_FDA_MT",        nbin, -2.0, 3.0 );
00210    if (Use["FDA_GA"])        histFDAGA   = new TH1F( "MVA_FDA_GA",        "MVA_FDA_GA",        nbin, -2.0, 3.0 );
00211    if (Use["Category"])      histCat     = new TH1F( "MVA_Category",      "MVA_Category",      nbin, -2., 2. );
00212    if (Use["Plugin"])        histPBdt    = new TH1F( "MVA_PBDT",          "MVA_BDT",           nbin, -0.8, 0.8 );
00213 
00214    // PDEFoam also returns per-event error, fill in histogram, and also fill significance
00215    if (Use["PDEFoam"]) {
00216       histPDEFoam    = new TH1F( "MVA_PDEFoam",       "MVA_PDEFoam",              nbin,  0, 1 );
00217       histPDEFoamErr = new TH1F( "MVA_PDEFoamErr",    "MVA_PDEFoam error",        nbin,  0, 1 );
00218       histPDEFoamSig = new TH1F( "MVA_PDEFoamSig",    "MVA_PDEFoam significance", nbin,  0, 10 );
00219    }
00220 
00221    // Book example histogram for probability (the other methods are done similarly)
00222    TH1F *probHistFi(0), *rarityHistFi(0);
00223    if (Use["Fisher"]) {
00224       probHistFi   = new TH1F( "MVA_Fisher_Proba",  "MVA_Fisher_Proba",  nbin, 0, 1 );
00225       rarityHistFi = new TH1F( "MVA_Fisher_Rarity", "MVA_Fisher_Rarity", nbin, 0, 1 );
00226    }
00227 
00228    // Prepare input tree (this must be replaced by your data source)
00229    // in this example, there is a toy tree with signal and one with background events
00230    // we'll later on use only the "signal" events for the test in this example.
00231    //   
00232    TFile *input(0);
00233    TString fname = "./tmva_example.root";   
00234    if (!gSystem->AccessPathName( fname )) 
00235       input = TFile::Open( fname ); // check if file in local directory exists
00236    else    
00237       input = TFile::Open( "http://root.cern.ch/files/tmva_class_example.root" ); // if not: download from ROOT server
00238    
00239    if (!input) {
00240       std::cout << "ERROR: could not open data file" << std::endl;
00241       exit(1);
00242    }
00243    std::cout << "--- TMVAClassificationApp    : Using input file: " << input->GetName() << std::endl;
00244    
00245    // --- Event loop
00246 
00247    // Prepare the event tree
00248    // - here the variable names have to corresponds to your tree
00249    // - you can use the same variables as above which is slightly faster,
00250    //   but of course you can use different ones and copy the values inside the event loop
00251    //
00252    std::cout << "--- Select signal sample" << std::endl;
00253    TTree* theTree = (TTree*)input->Get("TreeS");
00254    Float_t userVar1, userVar2;
00255    theTree->SetBranchAddress( "var1", &userVar1 );
00256    theTree->SetBranchAddress( "var2", &userVar2 );
00257    theTree->SetBranchAddress( "var3", &var3 );
00258    theTree->SetBranchAddress( "var4", &var4 );
00259 
00260    // Efficiency calculator for cut method
00261    Int_t    nSelCutsGA = 0;
00262    Double_t effS       = 0.7;
00263 
00264    std::vector<Float_t> vecVar(4); // vector for EvaluateMVA tests
00265 
00266    std::cout << "--- Processing: " << theTree->GetEntries() << " events" << std::endl;
00267    TStopwatch sw;
00268    sw.Start();
00269    for (Long64_t ievt=0; ievt<theTree->GetEntries();ievt++) {
00270 
00271       if (ievt%1000 == 0) std::cout << "--- ... Processing event: " << ievt << std::endl;
00272 
00273       theTree->GetEntry(ievt);
00274 
00275       var1 = userVar1 + userVar2;
00276       var2 = userVar1 - userVar2;
00277 
00278       // --- Return the MVA outputs and fill into histograms
00279 
00280       if (Use["CutsGA"]) {
00281          // Cuts is a special case: give the desired signal efficienciy
00282          Bool_t passed = reader->EvaluateMVA( "CutsGA method", effS );
00283          if (passed) nSelCutsGA++;
00284       }
00285 
00286       if (Use["Likelihood"   ])   histLk     ->Fill( reader->EvaluateMVA( "Likelihood method"    ) );
00287       if (Use["LikelihoodD"  ])   histLkD    ->Fill( reader->EvaluateMVA( "LikelihoodD method"   ) );
00288       if (Use["LikelihoodPCA"])   histLkPCA  ->Fill( reader->EvaluateMVA( "LikelihoodPCA method" ) );
00289       if (Use["LikelihoodKDE"])   histLkKDE  ->Fill( reader->EvaluateMVA( "LikelihoodKDE method" ) );
00290       if (Use["LikelihoodMIX"])   histLkMIX  ->Fill( reader->EvaluateMVA( "LikelihoodMIX method" ) );
00291       if (Use["PDERS"        ])   histPD     ->Fill( reader->EvaluateMVA( "PDERS method"         ) );
00292       if (Use["PDERSD"       ])   histPDD    ->Fill( reader->EvaluateMVA( "PDERSD method"        ) );
00293       if (Use["PDERSPCA"     ])   histPDPCA  ->Fill( reader->EvaluateMVA( "PDERSPCA method"      ) );
00294       if (Use["KNN"          ])   histKNN    ->Fill( reader->EvaluateMVA( "KNN method"           ) );
00295       if (Use["HMatrix"      ])   histHm     ->Fill( reader->EvaluateMVA( "HMatrix method"       ) );
00296       if (Use["Fisher"       ])   histFi     ->Fill( reader->EvaluateMVA( "Fisher method"        ) );
00297       if (Use["FisherG"      ])   histFiG    ->Fill( reader->EvaluateMVA( "FisherG method"       ) );
00298       if (Use["BoostedFisher"])   histFiB    ->Fill( reader->EvaluateMVA( "BoostedFisher method" ) );
00299       if (Use["LD"           ])   histLD     ->Fill( reader->EvaluateMVA( "LD method"            ) );
00300       if (Use["MLP"          ])   histNn     ->Fill( reader->EvaluateMVA( "MLP method"           ) );
00301       if (Use["MLPBFGS"      ])   histNnbfgs ->Fill( reader->EvaluateMVA( "MLPBFGS method"       ) );
00302       if (Use["MLPBNN"       ])   histNnbnn  ->Fill( reader->EvaluateMVA( "MLPBNN method"        ) );
00303       if (Use["CFMlpANN"     ])   histNnC    ->Fill( reader->EvaluateMVA( "CFMlpANN method"      ) );
00304       if (Use["TMlpANN"      ])   histNnT    ->Fill( reader->EvaluateMVA( "TMlpANN method"       ) );
00305       if (Use["BDT"          ])   histBdt    ->Fill( reader->EvaluateMVA( "BDT method"           ) );
00306       if (Use["BDTD"         ])   histBdtD   ->Fill( reader->EvaluateMVA( "BDTD method"          ) );
00307       if (Use["BDTG"         ])   histBdtG   ->Fill( reader->EvaluateMVA( "BDTG method"          ) );
00308       if (Use["RuleFit"      ])   histRf     ->Fill( reader->EvaluateMVA( "RuleFit method"       ) );
00309       if (Use["SVM_Gauss"    ])   histSVMG   ->Fill( reader->EvaluateMVA( "SVM_Gauss method"     ) );
00310       if (Use["SVM_Poly"     ])   histSVMP   ->Fill( reader->EvaluateMVA( "SVM_Poly method"      ) );
00311       if (Use["SVM_Lin"      ])   histSVML   ->Fill( reader->EvaluateMVA( "SVM_Lin method"       ) );
00312       if (Use["FDA_MT"       ])   histFDAMT  ->Fill( reader->EvaluateMVA( "FDA_MT method"        ) );
00313       if (Use["FDA_GA"       ])   histFDAGA  ->Fill( reader->EvaluateMVA( "FDA_GA method"        ) );
00314       if (Use["Category"     ])   histCat    ->Fill( reader->EvaluateMVA( "Category method"      ) );
00315       if (Use["Plugin"       ])   histPBdt   ->Fill( reader->EvaluateMVA( "P_BDT method"         ) );
00316 
00317       // Retrieve also per-event error
00318       if (Use["PDEFoam"]) {
00319          Double_t val = reader->EvaluateMVA( "PDEFoam method" );
00320          Double_t err = reader->GetMVAError();
00321          histPDEFoam   ->Fill( val );
00322          histPDEFoamErr->Fill( err );         
00323          if (err>1.e-50) histPDEFoamSig->Fill( val/err );
00324       }         
00325 
00326       // Retrieve probability instead of MVA output
00327       if (Use["Fisher"])   {
00328          probHistFi  ->Fill( reader->GetProba ( "Fisher method" ) );
00329          rarityHistFi->Fill( reader->GetRarity( "Fisher method" ) );
00330       }
00331    }
00332 
00333    // Get elapsed time
00334    sw.Stop();
00335    std::cout << "--- End of event loop: "; sw.Print();
00336 
00337    // Get efficiency for cuts classifier
00338    if (Use["CutsGA"]) std::cout << "--- Efficiency for CutsGA method: " << double(nSelCutsGA)/theTree->GetEntries()
00339                                 << " (for a required signal efficiency of " << effS << ")" << std::endl;
00340 
00341    if (Use["CutsGA"]) {
00342 
00343       // test: retrieve cuts for particular signal efficiency
00344       // CINT ignores dynamic_casts so we have to use a cuts-secific Reader function to acces the pointer  
00345       TMVA::MethodCuts* mcuts = reader->FindCutsMVA( "CutsGA method" ) ;
00346 
00347       if (mcuts) {      
00348          std::vector<Double_t> cutsMin;
00349          std::vector<Double_t> cutsMax;
00350          mcuts->GetCuts( 0.7, cutsMin, cutsMax );
00351          std::cout << "--- -------------------------------------------------------------" << std::endl;
00352          std::cout << "--- Retrieve cut values for signal efficiency of 0.7 from Reader" << std::endl;
00353          for (UInt_t ivar=0; ivar<cutsMin.size(); ivar++) {
00354             std::cout << "... Cut: " 
00355                       << cutsMin[ivar] 
00356                       << " < \"" 
00357                       << mcuts->GetInputVar(ivar)
00358                       << "\" <= " 
00359                       << cutsMax[ivar] << std::endl;
00360          }
00361          std::cout << "--- -------------------------------------------------------------" << std::endl;
00362       }
00363    }
00364 
00365    // --- Write histograms
00366 
00367    TFile *target  = new TFile( "TMVApp.root","RECREATE" );
00368    if (Use["Likelihood"   ])   histLk     ->Write();
00369    if (Use["LikelihoodD"  ])   histLkD    ->Write();
00370    if (Use["LikelihoodPCA"])   histLkPCA  ->Write();
00371    if (Use["LikelihoodKDE"])   histLkKDE  ->Write();
00372    if (Use["LikelihoodMIX"])   histLkMIX  ->Write();
00373    if (Use["PDERS"        ])   histPD     ->Write();
00374    if (Use["PDERSD"       ])   histPDD    ->Write();
00375    if (Use["PDERSPCA"     ])   histPDPCA  ->Write();
00376    if (Use["KNN"          ])   histKNN    ->Write();
00377    if (Use["HMatrix"      ])   histHm     ->Write();
00378    if (Use["Fisher"       ])   histFi     ->Write();
00379    if (Use["FisherG"      ])   histFiG    ->Write();
00380    if (Use["BoostedFisher"])   histFiB    ->Write();
00381    if (Use["LD"           ])   histLD     ->Write();
00382    if (Use["MLP"          ])   histNn     ->Write();
00383    if (Use["MLPBFGS"      ])   histNnbfgs ->Write();
00384    if (Use["MLPBNN"       ])   histNnbnn  ->Write();
00385    if (Use["CFMlpANN"     ])   histNnC    ->Write();
00386    if (Use["TMlpANN"      ])   histNnT    ->Write();
00387    if (Use["BDT"          ])   histBdt    ->Write();
00388    if (Use["BDTD"         ])   histBdtD   ->Write();
00389    if (Use["BDTG"         ])   histBdtG   ->Write(); 
00390    if (Use["RuleFit"      ])   histRf     ->Write();
00391    if (Use["SVM_Gauss"    ])   histSVMG   ->Write();
00392    if (Use["SVM_Poly"     ])   histSVMP   ->Write();
00393    if (Use["SVM_Lin"      ])   histSVML   ->Write();
00394    if (Use["FDA_MT"       ])   histFDAMT  ->Write();
00395    if (Use["FDA_GA"       ])   histFDAGA  ->Write();
00396    if (Use["Category"     ])   histCat    ->Write();
00397    if (Use["Plugin"       ])   histPBdt   ->Write();
00398 
00399    // Write also error and significance histos
00400    if (Use["PDEFoam"]) { histPDEFoam->Write(); histPDEFoamErr->Write(); histPDEFoamSig->Write(); }
00401 
00402    // Write also probability hists
00403    if (Use["Fisher"]) { if (probHistFi != 0) probHistFi->Write(); if (rarityHistFi != 0) rarityHistFi->Write(); }
00404    target->Close();
00405 
00406    std::cout << "--- Created root file: \"TMVApp.root\" containing the MVA output histograms" << std::endl;
00407   
00408    delete reader;
00409     
00410    std::cout << "==> TMVAClassificationApplication is done!" << endl << std::endl;
00411 } 

Generated on Tue Jul 5 15:26:36 2011 for ROOT_528-00b_version by  doxygen 1.5.1