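// TMVARegression
//
// Example of training and testing the TMVA methods for regression. The
// program books a set of MVA methods, trains them on the example tree
// "TreeR" (input variables var1 and var2, regression target fvalue) and
// writes the evaluation results to TMVAReg.root, which can be inspected
// with the TMVARegGui.C macro.
//
// Individual methods can be selected via command-line arguments, e.g.
// (assuming the binary is named TMVARegression):
//
//    TMVARegression LD BDT
//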
#include <cstdlib>
#include <iostream>
#include <map>
#include <string>

#include "TChain.h"
#include "TCut.h"
#include "TFile.h"
#include "TTree.h"
#include "TString.h"
#include "TObjString.h"
#include "TSystem.h"
#include "TROOT.h"

#include "TMVA/Factory.h"
#include "TMVA/Tools.h"

int main( int argc, char** argv )
{
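   // This loads the TMVA library; the call below follows the standard TMVA
   // examples and is assumed to be the intended setup step here.
   TMVA::Tools::Instance();

   // Default MVA methods to be trained and tested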
   std::map<std::string,int> Use;

   // Multi-dimensional likelihood and nearest-neighbour methods
   Use["PDERS"]           = 0;
   Use["PDEFoam"]         = 1;
   Use["KNN"]             = 1;

   // Linear discriminant
   Use["LD"]              = 1;

   // Function discriminant analysis
   Use["FDA_GA"]          = 1;
   Use["FDA_MC"]          = 0;
   Use["FDA_MT"]          = 0;
   Use["FDA_GAMT"]        = 0;

   // Neural network (multilayer perceptron)
   Use["MLP"]             = 1;

   // Support vector machine
   Use["SVM"]             = 0;

   // Boosted decision trees
   Use["BDT"]             = 1;
   Use["BDTG"]            = 0;

   std::cout << std::endl;
   std::cout << "==> Start TMVARegression" << std::endl;

   // Select methods: if method names are given on the command line, use only those
   if (argc>1) for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;
   for (int i=1; i<argc; i++) {
      std::string regMethod(argv[i]);
      if (Use.find(regMethod) == Use.end()) {
         std::cout << "Method \"" << regMethod << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
         for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) std::cout << it->first << " ";
         std::cout << std::endl;
         return 1;
      }
      Use[regMethod] = 1;
   }

   // --------------------------------------------------------------------------------------------------

   // Create a new ROOT output file where TMVA stores ntuples, histograms, etc.
   TString outfileName( "TMVAReg.root" );
   TFile* outputFile = TFile::Open( outfileName, "RECREATE" );

   // Create the factory object. The second argument is the output file; the
   // option string steers verbosity, colour output and the progress bar.
   // The available booking options are documented in the TMVA Users Guide.
   TMVA::Factory *factory = new TMVA::Factory( "TMVARegression", outputFile,
                                               "!V:!Silent:Color:DrawProgressBar" );

   // Define the input variables used for the MVA training (name, title, unit, type)
   factory->AddVariable( "var1", "Variable 1", "units", 'F' );
   factory->AddVariable( "var2", "Variable 2", "units", 'F' );

   // Spectator variables are not used in the training, but are copied to the output tree
   factory->AddSpectator( "spec1:=var1*2",  "Spectator 1", "units", 'F' );
   factory->AddSpectator( "spec2:=var1*3",  "Spectator 2", "units", 'F' );

   // The variable carrying the regression target
   factory->AddTarget( "fvalue" );

   // Read training and test data: use the local example file if present,
   // otherwise download it from the ROOT web server
   TFile *input(0);
   TString fname = "./tmva_reg_example.root";
   if (!gSystem->AccessPathName( fname ))
      input = TFile::Open( fname );
   else
      input = TFile::Open( "http://root.cern.ch/files/tmva_reg_example.root" );

   if (!input) {
      std::cout << "ERROR: could not open data file" << std::endl;
      exit(1);
   }
   std::cout << "--- TMVARegression           : Using input file: " << input->GetName() << std::endl;

   // Register the regression tree
   TTree *regTree = (TTree*)input->Get("TreeR");

   // Global event weight per tree (individual event weights are set below)
   Double_t regWeight = 1.0;

   // Add the tree to the factory
   factory->AddRegressionTree( regTree, regWeight );

   // Set an individual event weight; here the expression is simply "var1"
   // (variables used in the expression must exist in the input tree)
   factory->SetWeightExpression( "var1", "Regression" );

   // Optional cut on the training and test events (none applied here)
   TCut mycut = "";

   // Tell the factory how to split the events into training and test samples
   factory->PrepareTrainingAndTestTree( mycut,
                                        "nTrain_Regression=0:nTest_Regression=0:SplitMode=Random:NormMode=NumEvents:!V" );

   // --------------------------------------------------------------------------------------------------

   // Book MVA methods. The configuration options of each method are documented
   // in the TMVA Users Guide and in the corresponding MethodXyz source files.

   // PDE-RS method (multi-dimensional likelihood with range search)
   if (Use["PDERS"])
      factory->BookMethod( TMVA::Types::kPDERS, "PDERS",
                           "!H:!V:NormTree=T:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=40:NEventsMax=60:VarTransform=None" );

   // PDE-Foam method
   if (Use["PDEFoam"])
      factory->BookMethod( TMVA::Types::kPDEFoam, "PDEFoam",
                           "!H:!V:MultiTargetRegression=F:TargetSelection=Mpv:TailCut=0.001:VolFrac=0.0333:nActiveCells=500:nSampl=2000:nBin=5:Compress=T:Kernel=None:Nmin=10:VarTransform=None" );

   // k-nearest neighbour method
   if (Use["KNN"])
      factory->BookMethod( TMVA::Types::kKNN, "KNN",
                           "nkNN=20:ScaleFrac=0.8:SigmaFact=1.0:Kernel=Gaus:UseKernel=F:UseWeight=T:!Trim" );

   // Linear discriminant
   if (Use["LD"])
      factory->BookMethod( TMVA::Types::kLD, "LD",
                           "!H:!V:VarTransform=None" );

   // Function discriminant analysis (FDA): fit a user-defined function to the target
   if (Use["FDA_MC"])
      factory->BookMethod( TMVA::Types::kFDA, "FDA_MC",
                           "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=MC:SampleSize=100000:Sigma=0.1:VarTransform=D" );

   if (Use["FDA_GA"])
      factory->BookMethod( TMVA::Types::kFDA, "FDA_GA",
                           "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:PopSize=100:Cycles=3:Steps=30:Trim=True:SaveBestGen=1:VarTransform=Norm" );

   if (Use["FDA_MT"])
      factory->BookMethod( TMVA::Types::kFDA, "FDA_MT",
                           "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch" );

   if (Use["FDA_GAMT"])
      factory->BookMethod( TMVA::Types::kFDA, "FDA_GAMT",
                           "!H:!V:Formula=(0)+(1)*x0+(2)*x1:ParRanges=(-100,100);(-100,100);(-100,100):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim" );

   // Neural network (multilayer perceptron)
   if (Use["MLP"])
      factory->BookMethod( TMVA::Types::kMLP, "MLP", "!H:!V:VarTransform=Norm:NeuronType=tanh:NCycles=20000:HiddenLayers=N+20:TestRate=6:TrainingMethod=BFGS:Sampling=0.3:SamplingEpoch=0.8:ConvergenceImprove=1e-6:ConvergenceTests=15:!UseRegulator" );

   // Support vector machine
   if (Use["SVM"])
      factory->BookMethod( TMVA::Types::kSVM, "SVM", "Gamma=0.25:Tol=0.001:VarTransform=Norm" );

   // Boosted decision trees with AdaBoost.R2
   if (Use["BDT"])
      factory->BookMethod( TMVA::Types::kBDT, "BDT",
                           "!H:!V:NTrees=100:nEventsMin=5:BoostType=AdaBoostR2:SeparationType=RegressionVariance:nCuts=20:PruneMethod=CostComplexity:PruneStrength=30" );

   // Gradient-boosted decision trees
   if (Use["BDTG"])
      factory->BookMethod( TMVA::Types::kBDT, "BDTG",
                           "!H:!V:NTrees=2000:BoostType=Grad:Shrinkage=0.1:UseBaggedGrad:GradBaggingFraction=0.5:nCuts=20:MaxDepth=3:NNodesMax=15" );

   // --------------------------------------------------------------------------------------------------

   // Train MVAs using the set of training events
   factory->TrainAllMethods();

   // Evaluate all MVAs using the set of test events
   factory->TestAllMethods();

   // Evaluate and compare performance of all configured MVAs
   factory->EvaluateAllMethods();

   // Save the output
   outputFile->Close();

   std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
   std::cout << "==> TMVARegression is done!" << std::endl;

   delete factory;

   std::cout << std::endl;
   std::cout << "==> To view the results, launch the GUI: \"root -l TMVARegGui.C\"" << std::endl;
   std::cout << std::endl;
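
   return 0;  // explicit success exit code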
}