00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012 #ifndef ROOT_TMultiLayerPerceptron
00013 #define ROOT_TMultiLayerPerceptron
00014
00015 #ifndef ROOT_TObject
00016 #include "TObject.h"
00017 #endif
00018 #ifndef ROOT_TString
00019 #include "TString.h"
00020 #endif
00021 #ifndef ROOT_TObjArray
00022 #include "TObjArray.h"
00023 #endif
00024 #ifndef ROOT_TMatrixD
00025 #include "TMatrixD.h"
00026 #endif
00027 #ifndef ROOT_TNeuron
00028 #include "TNeuron.h"
00029 #endif
00030
00031 class TTree;
00032 class TEventList;
00033 class TTreeFormula;
00034 class TTreeFormulaManager;
00035
00036
00037
00038
00039
00040
00041
00042
00043
00044
00045
00046
00047
00048
00049
00050
00051
00052
00053
00054
00055
00056
00057
// TMultiLayerPerceptron
//
// A feed-forward multilayer perceptron working directly on a TTree:
// the network layout, the input/output branches and the event-weight
// expression are given as formula strings, and training/test samples
// are selected either by a cut expression or by TEventList objects.
// Supports several minimisation methods (see ELearningMethod), weight
// persistence (Dump/LoadWeights), standalone-code export (Export) and
// graphical inspection (Draw/DrawResult).
class TMultiLayerPerceptron : public TObject {
 // TMLPAnalyzer needs access to the internals (network, formulas) to
 // analyse a trained network.
 friend class TMLPAnalyzer;

 public:
   // Available minimisation methods, selected via SetLearningMethod().
   enum ELearningMethod { kStochastic, kBatch, kSteepestDescent,
                          kRibierePolak, kFletcherReeves, kBFGS };
   // Identifies which of the two event samples an operation refers to.
   enum EDataSet { kTraining, kTest };

   // Default constructor (required by ROOT I/O).
   TMultiLayerPerceptron();
   // Build a network from a layout string; training/test events are
   // selected by cut expressions (default: even/odd entry split).
   TMultiLayerPerceptron(const char* layout, TTree* data = 0,
                         const char* training = "Entry$%2==0",
                         const char* test = "",
                         TNeuron::ENeuronType type = TNeuron::kSigmoid,
                         const char* extF = "", const char* extD = "");
   // Same as above, with an additional per-event weight expression.
   TMultiLayerPerceptron(const char* layout,
                         const char* weight, TTree* data = 0,
                         const char* training = "Entry$%2==0",
                         const char* test = "",
                         TNeuron::ENeuronType type = TNeuron::kSigmoid,
                         const char* extF = "", const char* extD = "");
   // Build a network with training/test samples given as TEventList's.
   TMultiLayerPerceptron(const char* layout, TTree* data,
                         TEventList* training,
                         TEventList* test,
                         TNeuron::ENeuronType type = TNeuron::kSigmoid,
                         const char* extF = "", const char* extD = "");
   // TEventList variant with an additional per-event weight expression.
   TMultiLayerPerceptron(const char* layout,
                         const char* weight, TTree* data,
                         TEventList* training,
                         TEventList* test,
                         TNeuron::ENeuronType type = TNeuron::kSigmoid,
                         const char* extF = "", const char* extD = "");
   virtual ~TMultiLayerPerceptron();

   // Attach a (new) data tree; formulas are re-attached to its branches.
   void SetData(TTree*);
   // Define the training/test samples, by list or by cut expression.
   void SetTrainingDataSet(TEventList* train);
   void SetTestDataSet(TEventList* test);
   void SetTrainingDataSet(const char* train);
   void SetTestDataSet(const char* test);
   // Choose the minimisation method used by Train().
   void SetLearningMethod(TMultiLayerPerceptron::ELearningMethod method);
   // Set the per-event weight expression (evaluated via TTreeFormula).
   void SetEventWeight(const char*);

   // Train the network for nEpoch epochs; stops early if the error
   // drops below minE. The option string controls text/graph output.
   void Train(Int_t nEpoch, Option_t* option = "text", Double_t minE=0);
   // Network output 'index' for a given event of the attached tree.
   Double_t Result(Int_t event, Int_t index = 0) const;
   // Error contribution of a single event / total error on a data set.
   Double_t GetError(Int_t event) const;
   Double_t GetError(TMultiLayerPerceptron::EDataSet set) const;
   // Compute the error derivatives w.r.t. the weights (dE/dw).
   void ComputeDEDw() const;
   // (Re)initialize all weights with random values.
   void Randomize() const;

   // Tuning parameters of the learning methods; exact roles depend on
   // the method chosen — see the matching getters and the implementation.
   void SetEta(Double_t eta);
   void SetEpsilon(Double_t eps);
   void SetDelta(Double_t delta);
   void SetEtaDecay(Double_t ed);
   void SetTau(Double_t tau);
   // Number of iterations between resets of the search direction
   // (used by the conjugate-gradient/BFGS methods — confirm in impl).
   void SetReset(Int_t reset);
   inline Double_t GetEta() const { return fEta; }
   inline Double_t GetEpsilon() const { return fEpsilon; }
   inline Double_t GetDelta() const { return fDelta; }
   inline Double_t GetEtaDecay() const { return fEtaDecay; }
   inline Double_t GetTau() const { return fTau; }
   inline Int_t GetReset() const { return fReset; }
   inline TString GetStructure() const { return fStructure; }
   inline TNeuron::ENeuronType GetType() const { return fType; }

   // Draw the distribution of output 'index' ("test"/"train" option).
   void DrawResult(Int_t index = 0, Option_t* option = "test") const;
   // Write weights to a file ("-" = stdout) / read them back.
   void DumpWeights(Option_t* filename = "-") const;
   void LoadWeights(Option_t* filename = "");
   // Evaluate output 'index' for an arbitrary input vector 'params'
   // (independent of the attached tree).
   Double_t Evaluate(Int_t index, Double_t* params) const;
   // Generate a standalone function implementing the trained network.
   void Export(Option_t* filename = "NNfunction", Option_t* language = "C++") const;
   // Graphical representation of the network structure.
   virtual void Draw(Option_t *option="");

 protected:
   // Bind the layout/weight formulas to the current tree's branches.
   void AttachData();
   // Instantiate neurons and synapses from the layout string.
   void BuildNetwork();
   // Load one tree entry and propagate it to the input neurons.
   void GetEntry(Int_t) const;

   // One epoch of each minimisation method; the Double_t* buffers hold
   // the current search direction / weight updates.
   void MLP_Stochastic(Double_t*);
   void MLP_Batch(Double_t*);
   // Line minimisation along a direction; returns kTRUE on failure
   // (presumably — verify against the implementation).
   Bool_t LineSearch(Double_t*, Double_t*);
   void SteepestDir(Double_t*);
   void ConjugateGradientsDir(Double_t*, Double_t);
   // BFGS helpers: gamma/delta vectors, approximate inverse Hessian,
   // resulting search direction, and directional derivative.
   void SetGammaDelta(TMatrixD&, TMatrixD&, Double_t*);
   bool GetBFGSH(TMatrixD&, TMatrixD &, TMatrixD&);
   void BFGSDir(TMatrixD&, Double_t*);
   Double_t DerivDir(Double_t*);
   // Error functions evaluated on the current entry.
   Double_t GetCrossEntropyBinary() const;
   Double_t GetCrossEntropy() const;
   Double_t GetSumSquareError() const;

 private:
   // Copying a network is not supported: declared but not implemented.
   TMultiLayerPerceptron(const TMultiLayerPerceptron&);
   TMultiLayerPerceptron& operator=(const TMultiLayerPerceptron&);
   // Helpers used by BuildNetwork() to parse the layout string and
   // create each layer of neurons.
   void ExpandStructure();
   void BuildFirstLayer(TString&);
   void BuildHiddenLayers(TString&);
   void BuildOneHiddenLayer(const TString& sNumNodes, Int_t& layer,
                            Int_t& prevStart, Int_t& prevStop,
                            Bool_t lastLayer);
   void BuildLastLayer(TString&, Int_t);
   // Random permutation of event indices (stochastic training).
   void Shuffle(Int_t*, Int_t) const;
   // Move the weights along a direction by a given step.
   void MLP_Line(Double_t*, Double_t*, Double_t);

   TTree* fData;                   //! pointer to the tree used as datasource (owned elsewhere)
   Int_t fCurrentTree;             //! index of the current tree in a chain
   Double_t fCurrentTreeWeight;    //! weight of the current tree in a chain
   TObjArray fNetwork;             // array of TNeuron's: the whole network
   TObjArray fFirstLayer;          // array of the input neurons; subset of fNetwork
   TObjArray fLastLayer;           // array of the output neurons; subset of fNetwork
   TObjArray fSynapses;            // array of the synapses connecting neurons
   TString fStructure;             // string containing the network layout
   TString fWeight;                // string containing the event-weight expression
   TNeuron::ENeuronType fType;     // neuron type for hidden layers
   TNeuron::ENeuronType fOutType;  // neuron type for the output layer
   TString fextF;                  // "external function" spec (used when type is external)
   TString fextD;                  // derivative of the external function
   TEventList *fTraining;          //! event list defining the training sample
   TEventList *fTest;              //! event list defining the test sample
   ELearningMethod fLearningMethod; //! selected minimisation method
   TTreeFormula* fEventWeight;     //! formula computing the event weight
   TTreeFormulaManager* fManager;  //! manager synchronizing the formulas
   Double_t fEta;                  //! Eta - used in stochastic minimisation
   Double_t fEpsilon;              //! Epsilon - used in stochastic minimisation
   Double_t fDelta;                //! Delta - used in stochastic minimisation
   Double_t fEtaDecay;             //! EtaDecay - Eta is multiplied by this at each epoch
   Double_t fTau;                  //! Tau - used in line search
   Double_t fLastAlpha;            //! internal parameter used in line search
   Int_t fReset;                   //! iterations between direction resets
   Bool_t fTrainingOwner;          //! kTRUE if fTraining is owned (and deleted) by this object
   Bool_t fTestOwner;              //! kTRUE if fTest is owned (and deleted) by this object

   ClassDef(TMultiLayerPerceptron, 4) // a Neural Network
};
00183
00184 #endif