Logo ROOT   6.30.04
Reference Guide
 All | Namespaces | Files | Pages
TMultiLayerPerceptron.h
Go to the documentation of this file.
1 // @(#)root/mlp:$Id$
2 // Author: Christophe.Delaere@cern.ch 20/07/03
3 
4 /*************************************************************************
5  * Copyright (C) 1995-2003, Rene Brun and Fons Rademakers. *
6  * All rights reserved. *
7  * *
8  * For the licensing terms see $ROOTSYS/LICENSE. *
9  * For the list of contributors see $ROOTSYS/README/CREDITS. *
10  *************************************************************************/
11 
12 #ifndef ROOT_TMultiLayerPerceptron
13 #define ROOT_TMultiLayerPerceptron
14 
15 #include "TObject.h"
16 #include "TString.h"
17 #include "TObjArray.h"
18 #include "TMatrixD.h"
19 #include "TNeuron.h"
20 
21 class TTree;
22 class TEventList;
23 class TTreeFormula;
24 class TTreeFormulaManager;
25 
26 class TMultiLayerPerceptron : public TObject {
27  friend class TMLPAnalyzer;
28 
29  public:
30  enum ELearningMethod { kStochastic, kBatch, kSteepestDescent,
31  kRibierePolak, kFletcherReeves, kBFGS };
32  enum EDataSet { kTraining, kTest };
33  TMultiLayerPerceptron();
34  TMultiLayerPerceptron(const char* layout, TTree* data = 0,
35  const char* training = "Entry$%2==0",
36  const char* test = "",
37  TNeuron::ENeuronType type = TNeuron::kSigmoid,
38  const char* extF = "", const char* extD = "");
39  TMultiLayerPerceptron(const char* layout,
40  const char* weight, TTree* data = 0,
41  const char* training = "Entry$%2==0",
42  const char* test = "",
43  TNeuron::ENeuronType type = TNeuron::kSigmoid,
44  const char* extF = "", const char* extD = "");
45  TMultiLayerPerceptron(const char* layout, TTree* data,
46  TEventList* training,
47  TEventList* test,
48  TNeuron::ENeuronType type = TNeuron::kSigmoid,
49  const char* extF = "", const char* extD = "");
50  TMultiLayerPerceptron(const char* layout,
51  const char* weight, TTree* data,
52  TEventList* training,
53  TEventList* test,
54  TNeuron::ENeuronType type = TNeuron::kSigmoid,
55  const char* extF = "", const char* extD = "");
56  virtual ~TMultiLayerPerceptron();
57  void SetData(TTree*);
58  void SetTrainingDataSet(TEventList* train);
59  void SetTestDataSet(TEventList* test);
60  void SetTrainingDataSet(const char* train);
61  void SetTestDataSet(const char* test);
62  void SetLearningMethod(TMultiLayerPerceptron::ELearningMethod method);
63  void SetEventWeight(const char*);
64  void Train(Int_t nEpoch, Option_t* option = "text", Double_t minE=0);
65  Double_t Result(Int_t event, Int_t index = 0) const;
66  Double_t GetError(Int_t event) const;
67  Double_t GetError(TMultiLayerPerceptron::EDataSet set) const;
68  void ComputeDEDw() const;
69  void Randomize() const;
70  void SetEta(Double_t eta);
71  void SetEpsilon(Double_t eps);
72  void SetDelta(Double_t delta);
73  void SetEtaDecay(Double_t ed);
74  void SetTau(Double_t tau);
75  void SetReset(Int_t reset);
76  inline Double_t GetEta() const { return fEta; }
77  inline Double_t GetEpsilon() const { return fEpsilon; }
78  inline Double_t GetDelta() const { return fDelta; }
79  inline Double_t GetEtaDecay() const { return fEtaDecay; }
80  TMultiLayerPerceptron::ELearningMethod GetLearningMethod() const { return fLearningMethod; }
81  inline Double_t GetTau() const { return fTau; }
82  inline Int_t GetReset() const { return fReset; }
83  inline TString GetStructure() const { return fStructure; }
84  inline TNeuron::ENeuronType GetType() const { return fType; }
85  void DrawResult(Int_t index = 0, Option_t* option = "test") const;
86  Bool_t DumpWeights(Option_t* filename = "-") const;
87  Bool_t LoadWeights(Option_t* filename = "");
88  Double_t Evaluate(Int_t index, Double_t* params) const;
89  void Export(Option_t* filename = "NNfunction", Option_t* language = "C++") const;
90  virtual void Draw(Option_t *option="");
91 
92  protected:
93  void AttachData();
94  void BuildNetwork();
95  void GetEntry(Int_t) const;
96  // it's a choice not to force learning function being const, even if possible
97  void MLP_Stochastic(Double_t*);
98  void MLP_Batch(Double_t*);
99  Bool_t LineSearch(Double_t*, Double_t*);
100  void SteepestDir(Double_t*);
101  void ConjugateGradientsDir(Double_t*, Double_t);
102  void SetGammaDelta(TMatrixD&, TMatrixD&, Double_t*);
103  bool GetBFGSH(TMatrixD&, TMatrixD &, TMatrixD&);
104  void BFGSDir(TMatrixD&, Double_t*);
105  Double_t DerivDir(Double_t*);
106  Double_t GetCrossEntropyBinary() const;
107  Double_t GetCrossEntropy() const;
108  Double_t GetSumSquareError() const;
109 
110  private:
111  TMultiLayerPerceptron(const TMultiLayerPerceptron&); // Not implemented
112  TMultiLayerPerceptron& operator=(const TMultiLayerPerceptron&); // Not implemented
113  void ExpandStructure();
114  void BuildFirstLayer(TString&);
115  void BuildHiddenLayers(TString&);
116  void BuildOneHiddenLayer(const TString& sNumNodes, Int_t& layer,
117  Int_t& prevStart, Int_t& prevStop,
118  Bool_t lastLayer);
119  void BuildLastLayer(TString&, Int_t);
120  void Shuffle(Int_t*, Int_t) const;
121  void MLP_Line(Double_t*, Double_t*, Double_t);
122 
123  TTree* fData; ///<! pointer to the tree used as datasource
124  Int_t fCurrentTree; ///<! index of the current tree in a chain
125  Double_t fCurrentTreeWeight; ///<! weight of the current tree in a chain
126  TObjArray fNetwork; ///< Collection of all the neurons in the network
127  TObjArray fFirstLayer; ///< Collection of the input neurons; subset of fNetwork
128  TObjArray fLastLayer; ///< Collection of the output neurons; subset of fNetwork
129  TObjArray fSynapses; ///< Collection of all the synapses in the network
130  TString fStructure; ///< String containing the network structure
131  TString fWeight; ///< String containing the event weight
132  TNeuron::ENeuronType fType; ///< Type of hidden neurons
133  TNeuron::ENeuronType fOutType; ///< Type of output neurons
134  TString fextF; ///< String containing the function name
135  TString fextD; ///< String containing the derivative name
136  TEventList *fTraining; ///<! EventList defining the events in the training dataset
137  TEventList *fTest; ///<! EventList defining the events in the test dataset
138  ELearningMethod fLearningMethod; ///<! The Learning Method
139  TTreeFormula* fEventWeight; ///<! formula representing the event weight
140  TTreeFormulaManager* fManager; ///<! TTreeFormulaManager for the weight and neurons
141  Double_t fEta; ///<! Eta - used in stochastic minimisation - Default=0.1
142  Double_t fEpsilon; ///<! Epsilon - used in stochastic minimisation - Default=0.
143  Double_t fDelta; ///<! Delta - used in stochastic minimisation - Default=0.
144  Double_t fEtaDecay; ///<! EtaDecay - Eta *= EtaDecay at each epoch - Default=1.
145  Double_t fTau; ///<! Tau - used in line search - Default=3.
146  Double_t fLastAlpha; ///<! internal parameter used in line search
147  Int_t fReset; ///<! number of epochs between two resets of the search direction to the steepest descent - Default=50
148  Bool_t fTrainingOwner; ///<! internal flag whether one has to delete fTraining or not
149  Bool_t fTestOwner; ///<! internal flag whether one has to delete fTest or not
150 
151  ClassDef(TMultiLayerPerceptron, 4) // a Neural Network
152 };
153 
154 #endif