TNeuron.h

// @(#)root/mlp:$Id: TNeuron.h 22428 2008-03-03 18:17:03Z brun $
// Author: Christophe.Delaere@cern.ch   20/07/03

/*************************************************************************
 * Copyright (C) 1995-2003, Rene Brun and Fons Rademakers.               *
 * All rights reserved.                                                  *
 *                                                                       *
 * For the licensing terms see $ROOTSYS/LICENSE.                         *
 * For the list of contributors see $ROOTSYS/README/CREDITS.             *
 *************************************************************************/

#ifndef ROOT_TNeuron
#define ROOT_TNeuron

#ifndef ROOT_TNamed
#include "TNamed.h"
#endif
#ifndef ROOT_TObjArray
#include "TObjArray.h"
#endif

class TTreeFormula;
class TSynapse;
class TBranch;
class TTree;
class TFormula;
//____________________________________________________________________
//
// TNeuron
//
// This class describes an elementary neuron, which is the basic
// building block of a Neural Network.
// A network is built by connecting neurons with synapses.
// There are different types of neurons: linear (a+bx),
// sigmoid (1/(1+exp(-x))), tanh or gaussian.
// In a Multi Layer Perceptron, the input layer is made of
// inactive neurons (returning the normalized input), hidden layers
// are made of sigmoids and output neurons are linear.
//
// This implementation contains several methods to compute the value,
// the derivative, the DeDw, ...
// Values are stored in local buffers. The SetNewEvent() method is
// there to signal that the buffered values are outdated.
//
//____________________________________________________________________

class TNeuron : public TNamed {
   friend class TSynapse;

 public:
   enum ENeuronType { kOff, kLinear, kSigmoid, kTanh, kGauss, kSoftmax, kExternal };

   TNeuron(ENeuronType type = kSigmoid,
           const char* name = "", const char* title = "",
           const char* extF = "", const char* extD = "");
   virtual ~TNeuron() {}
   inline TSynapse* GetPre(Int_t n) const { return (TSynapse*) fpre.At(n); }
   inline TSynapse* GetPost(Int_t n) const { return (TSynapse*) fpost.At(n); }
   inline TNeuron* GetInLayer(Int_t n) const { return (TNeuron*) flayer.At(n); }
   TTreeFormula* UseBranch(TTree*, const char*);
   Double_t GetInput() const;
   Double_t GetValue() const;
   Double_t GetDerivative() const;
   Double_t GetError() const;
   Double_t GetTarget() const;
   Double_t GetDeDw() const;
   Double_t GetBranch() const;
   ENeuronType GetType() const;
   void SetWeight(Double_t w);
   inline Double_t GetWeight() const { return fWeight; }
   void SetNormalisation(Double_t mean, Double_t RMS);
   inline const Double_t* GetNormalisation() const { return fNorm; }
   void SetNewEvent() const;
   void SetDEDw(Double_t in);
   inline Double_t GetDEDw() const { return fDEDw; }
   void ForceExternalValue(Double_t value);
   void AddInLayer(TNeuron*);

 protected:
   Double_t Sigmoid(Double_t x) const;
   Double_t DSigmoid(Double_t x) const;
   void AddPre(TSynapse*);
   void AddPost(TSynapse*);

 private:
   TNeuron(const TNeuron&); // Not implemented
   TNeuron& operator=(const TNeuron&); // Not implemented

   TObjArray fpre;        // pointers to the previous level in a network
   TObjArray fpost;       // pointers to the next level in a network
   TObjArray flayer;      // pointers to the current level in a network (neurons, not synapses)
   Double_t fWeight;      // weight used for computation
   Double_t fNorm[2];     // normalisation to mean=0, RMS=1.
   ENeuronType fType;     // neuron type
   TFormula* fExtF;       // function   (external mode)
   TFormula* fExtD;       // derivative (external mode)
   // buffers
   // should be mutable when supported by all compilers
   TTreeFormula* fFormula;//! formula to be used for inputs and outputs
   Int_t fIndex;          //! index in the formula
   Bool_t fNewInput;      //! do we need to compute fInput again ?
   Double_t fInput;       //! buffer containing the last neuron input
   Bool_t fNewValue;      //! do we need to compute fValue again ?
   Double_t fValue;       //! buffer containing the last neuron output
   Bool_t fNewDeriv;      //! do we need to compute fDerivative again ?
   Double_t fDerivative;  //! buffer containing the last neuron derivative
   Bool_t fNewDeDw;       //! do we need to compute fDeDw again ?
   Double_t fDeDw;        //! buffer containing the last derivative of the error
   Double_t fDEDw;        //! buffer containing the sum over all examples of DeDw

   ClassDef(TNeuron, 4)   // Neuron for MultiLayerPerceptrons
};

#endif
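
As a rough illustration of the interface declared above, the sketch below drives two free-standing neurons by hand. It is not taken from the ROOT sources: in practice the network is assembled by TMultiLayerPerceptron, which also creates the TSynapse objects connecting the layers, and the neuron names, types and values used here are purely illustrative.

// Minimal usage sketch (standalone ROOT macro; names and values are made up).
#include "TNeuron.h"

void neuronSketch()
{
   // Input neurons are inactive (kOff) and return the normalised input;
   // hidden neurons are sigmoids by default.
   TNeuron input(TNeuron::kOff, "in", "input neuron");
   TNeuron hidden(TNeuron::kSigmoid, "hid", "hidden neuron");

   input.SetNormalisation(0., 1.);   // normalisation parameters (mean, RMS)
   input.ForceExternalValue(0.5);    // feed a value into the input neuron by hand

   hidden.SetWeight(0.1);            // bias weight used when computing the input

   // GetValue()/GetDerivative() fill the local buffers on first use and then
   // return the cached values.
   Double_t v = hidden.GetValue();
   Double_t d = hidden.GetDerivative();

   // SetNewEvent() marks the buffers as outdated, so the next GetValue()
   // recomputes everything for the new event.
   hidden.SetNewEvent();
   v = hidden.GetValue();
   (void)v; (void)d;
}

Connecting neurons into layers (AddInLayer) and attaching TTree branches to the input layer (UseBranch) follow the same pattern; those steps are normally handled by TMultiLayerPerceptron rather than by user code.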
