mlpRegression.C

//  This macro shows the use of an ANN for regression analysis:
//  given a set {i} of input vectors i and a set {o} of output vectors o,
//  one looks for the unknown function f(i)=o.
//  The ANN can approximate this function; the TMLPAnalyzer::DrawTruthDeviation
//  methods can be used to evaluate the quality of the approximation.
//
//  For simplicity, we use a known function to create test and training data.
//  In reality this function is usually not known, and the data comes e.g.
//  from measurements.
//
//  Axel Naumann, 2005-02-02
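//
//  To run this macro from the ROOT prompt (assuming a standard ROOT setup),
//  something like
//     root [0] .x mlpRegression.C
//  should train the network and produce the canvases described below.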

Double_t theUnknownFunction(Double_t x, Double_t y) {
   return sin((1.7+x)*(x-0.3)-2.3*(y+0.7));
}

void mlpRegression() {
   // create a tree with training and test data:
   // we have two input parameters x and y,
   // and one output value f(x,y)
   TNtuple* t=new TNtuple("tree","tree","x:y:f");
   TRandom r;
   for (Int_t i=0; i<1000; i++) {
      Float_t x=r.Rndm();
      Float_t y=r.Rndm();
      // fill it with x, y, and f(x,y) - usually this function
      // is not known, and the value of f for a given x and y comes
      // e.g. from measurements
      t->Fill(x,y,theUnknownFunction(x,y));
   }

   // create the ANN
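   // layout "x,y:10:8:f": two input neurons (x and y), two hidden layers
   // with 10 and 8 neurons, and one output neuron (f). The last two
   // arguments are TTree selections: "Entry$%2" uses the odd-numbered
   // entries for training, "(Entry$%2)==0" keeps the even ones as test sample.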
   TMultiLayerPerceptron* mlp=new TMultiLayerPerceptron("x,y:10:8:f",t,
      "Entry$%2","(Entry$%2)==0");
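   // train for 150 epochs; "graph" displays the error evolution while
   // training, and "update=10" refreshes the display every 10 epochs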
   mlp->Train(150,"graph update=10");

   // analyze it
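   // GatherInformations() builds the analyzer's internal tree of network
   // outputs and truth values for the test events; CheckNetwork() prints
   // basic checks of the network, and DrawDInputs() shows how strongly
   // each input influences the network output.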
   TMLPAnalyzer* mlpa=new TMLPAnalyzer(mlp);
   mlpa->GatherInformations();
   mlpa->CheckNetwork();
   mlpa->DrawDInputs();

   // draw statistics that show the quality of the ANN's approximation
   TCanvas* cIO=new TCanvas("TruthDeviation", "TruthDeviation");
   cIO->Divide(2,2);
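   // Divide(2,2) splits the canvas into four pads; cd(n) makes pad n the
   // active pad for the following drawing commands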
   cIO->cd(1);
   // draw the difference between the ANN's output for (x,y) and
   // the true value f(x,y), vs. f(x,y), as TProfiles
   mlpa->DrawTruthDeviations();

   cIO->cd(2);
   // draw the difference between the ANN's output for (x,y) and
   // the true value f(x,y), vs. x, and vs. y, as TProfiles
   mlpa->DrawTruthDeviationInsOut();

   cIO->cd(3);
   // draw a box plot of the difference between the ANN's output
   // and the true value f(x,y), vs. the true value
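   // Out.Out0 and True.True0 are branches of the analyzer's IO tree holding
   // the network output and the true value for each test event; ">>hDelta"
   // redirects the result into a histogram named hDelta, and "goff"
   // suppresses graphics so the histogram is only filled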
   mlpa->GetIOTree()->Draw("Out.Out0-True.True0:True.True0>>hDelta","","goff");
   TH2F* hDelta=(TH2F*)gDirectory->Get("hDelta");
   hDelta->SetTitle("Difference between ANN output and truth vs. truth");
   hDelta->Draw("BOX");

   cIO->cd(4);
   // draw the difference between the ANN's output for (x,y) and the true
   // value f(x,y) on a grid that extends beyond the training range, to see
   // how well the ANN extrapolates
   Double_t vx[225];
   Double_t vy[225];
   Double_t delta[225];
   Double_t v[2];
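   // scan a 15x15 grid: ix/5.-1. and iy/5.-1. run from -1.0 to +1.8 in
   // steps of 0.2, i.e. well beyond the [0,1] range used for training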
   for (Int_t ix=0; ix<15; ix++) {
      v[0]=ix/5.-1.;
      for (Int_t iy=0; iy<15; iy++) {
         v[1]=iy/5.-1.;
         Int_t idx=ix*15+iy;
         vx[idx]=v[0];
         vy[idx]=v[1];
         delta[idx]=mlp->Evaluate(0, v)-theUnknownFunction(v[0],v[1]);
      }
   }
   TGraph2D* g2Extrapolate=new TGraph2D("ANN extrapolation",
                                        "ANN extrapolation, ANN output - truth",
                                        225, vx, vy, delta);
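   // "TRI2" draws the Delaunay triangulation of the points, with each
   // triangle colored according to its z value (the ANN - truth deviation)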
   g2Extrapolate->Draw("TRI2");
}
