TNeuron.cxx
// @(#)root/tmva $Id$
// Author: Matt Jachowski

/**********************************************************************************
 * Project: TMVA - a Root-integrated toolkit for multivariate data analysis *
 * Package: TMVA *
 * Class : TNeuron *
 * Web : http://tmva.sourceforge.net *
 * *
 * Description: *
 * Implementation (see header for description) *
 * *
 * Authors (alphabetical): *
 * Matt Jachowski <jachowski@stanford.edu> - Stanford University, USA *
 * *
 * Copyright (c) 2005: *
 * CERN, Switzerland *
 * *
 * Redistribution and use in source and binary forms, with or without *
 * modification, are permitted according to the terms listed in LICENSE *
 * (http://tmva.sourceforge.net/LICENSE) *
 **********************************************************************************/

/*! \class TMVA::TNeuron
\ingroup TMVA
Neuron class used by TMVA artificial neural network methods
*/
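
// The following is a minimal usage sketch, not code taken from TMVA itself: it
// only illustrates the calling order of the methods implemented in this file.
// "activation" and "inputCalculator" are placeholders for concrete TActivation
// and TNeuronInput objects created elsewhere; note that the setters delete any
// previously assigned object.
//
//    TMVA::TNeuron neuron;
//    neuron.SetActivationEqn(activation);        // activation function f
//    neuron.SetInputCalculator(inputCalculator); // rule for combining the inputs
//
//    // forward pass (non-input neuron):
//    neuron.CalculateValue();                    // fValue via the input calculator
//    neuron.CalculateActivationValue();          // fActivationValue = f(fValue)
//
//    // backward pass, after SetError() has been called on the output neurons:
//    neuron.CalculateDelta();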

#include "TMVA/TNeuron.h"

#include "TMVA/MsgLogger.h"
#include "TMVA/TActivation.h"
#include "TMVA/Tools.h"
#include "TMVA/TNeuronInput.h"
#include "TMVA/Types.h"

#include "TH1D.h"
#include "ThreadLocalStorage.h"
#include "TObjArray.h"

static const Int_t UNINITIALIZED = -1;

using std::vector;

ClassImp(TMVA::TNeuron);

////////////////////////////////////////////////////////////////////////////////
/// standard constructor

TMVA::TNeuron::TNeuron()
{
   InitNeuron();
}

////////////////////////////////////////////////////////////////////////////////
/// destructor

TMVA::TNeuron::~TNeuron()
{
   if (fLinksIn != NULL) delete fLinksIn;
   if (fLinksOut != NULL) delete fLinksOut;
}

////////////////////////////////////////////////////////////////////////////////
/// initialize the neuron, most variables still need to be set via setters

void TMVA::TNeuron::InitNeuron()
{
   fLinksIn = new TObjArray();
   fLinksOut = new TObjArray();
   fValue = UNINITIALIZED;
   fActivationValue = UNINITIALIZED;
   fDelta = UNINITIALIZED;
   fDEDw = UNINITIALIZED;
   fError = UNINITIALIZED;
   fActivation = NULL;
   fForcedValue = kFALSE;
   fInputCalculator = NULL;
}

////////////////////////////////////////////////////////////////////////////////
/// force the value, typically for input and bias neurons

void TMVA::TNeuron::ForceValue(Double_t value)
{
   fValue = value;
   fForcedValue = kTRUE;
}

////////////////////////////////////////////////////////////////////////////////
/// calculate neuron input
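/// (when the value is not forced, the calculation is delegated to the
/// TNeuronInput set via SetInputCalculator(), typically a weighted sum over the
/// incoming synapses)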

void TMVA::TNeuron::CalculateValue()
{
   if (fForcedValue) return;
   fValue = fInputCalculator->GetInput(this);
}

////////////////////////////////////////////////////////////////////////////////
/// calculate neuron activation/output

void TMVA::TNeuron::CalculateActivationValue()
{
   if (fActivation == NULL) {
      PrintMessage( kWARNING, "No activation equation specified." );
      fActivationValue = UNINITIALIZED;
      return;
   }
   fActivationValue = fActivation->Eval(fValue);
}

////////////////////////////////////////////////////////////////////////////////
/// calculate error field
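/// For a hidden neuron this is the usual backpropagation rule (a restatement of
/// the code below, with \f$v_j\f$ the neuron input and \f$f\f$ the activation
/// function):
/// \f[ \delta_j = f'(v_j) \sum_k w_{jk}\,\delta_k \f]
/// where the sum of weighted deltas runs over the outgoing synapses; for an
/// output neuron the externally set error replaces the sum, and for an input
/// neuron the delta is simply zero.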

void TMVA::TNeuron::CalculateDelta()
{
   // no need to adjust input neurons
   if (IsInputNeuron()) {
      fDelta = 0.0;
      return;
   }

   Double_t error;

   // an output neuron should have its error set already
   if (IsOutputNeuron()) error = fError;

   // need to calculate the error for any other neuron
   else {
      error = 0.0;
      TSynapse* synapse = NULL;
      // Replaced the TObjArrayIter pointer by an object, as creating it on the
      // stack is much faster (5-10% improvement seen) than allocating new memory
      // for the pointer each time. Thanks to Peter Elmer who pointed this out.
      // TObjArrayIter* iter = (TObjArrayIter*)fLinksOut->MakeIterator();
      TObjArrayIter iter(fLinksOut);
      while (true) {
         synapse = (TSynapse*) iter.Next();
         if (synapse == NULL) break;
         error += synapse->GetWeightedDelta();
      }
   }

   fDelta = error * fActivation->EvalDerivative(GetValue());
}

////////////////////////////////////////////////////////////////////////////////
/// set input calculator

void TMVA::TNeuron::SetInputCalculator(TNeuronInput* calculator)
{
   if (fInputCalculator != NULL) delete fInputCalculator;
   fInputCalculator = calculator;
}

////////////////////////////////////////////////////////////////////////////////
/// set activation equation

void TMVA::TNeuron::SetActivationEqn(TActivation* activation)
{
   if (fActivation != NULL) delete fActivation;
   fActivation = activation;
}

////////////////////////////////////////////////////////////////////////////////
/// add synapse as a pre-link to this neuron

void TMVA::TNeuron::AddPreLink(TSynapse* pre)
{
   if (IsInputNeuron()) return;
   fLinksIn->Add(pre);
}

////////////////////////////////////////////////////////////////////////////////
/// add synapse as a post-link to this neuron

void TMVA::TNeuron::AddPostLink(TSynapse* post)
{
   if (IsOutputNeuron()) return;
   fLinksOut->Add(post);
}

////////////////////////////////////////////////////////////////////////////////
/// delete all pre-links

void TMVA::TNeuron::DeletePreLinks()
{
   DeleteLinksArray(fLinksIn);
}

////////////////////////////////////////////////////////////////////////////////
/// delete an array of TSynapses

void TMVA::TNeuron::DeleteLinksArray(TObjArray*& links)
{
   if (links == NULL) return;

   TSynapse* synapse = NULL;
   Int_t numLinks = links->GetEntriesFast();
   for (Int_t i=0; i<numLinks; i++) {
      synapse = (TSynapse*)links->At(i);
      if (synapse != NULL) delete synapse;
   }
   delete links;
   links = NULL;
}

////////////////////////////////////////////////////////////////////////////////
/// set error, this should only be done for an output neuron

void TMVA::TNeuron::SetError(Double_t error)
{
   if (!IsOutputNeuron())
      PrintMessage( kWARNING, "Warning! Setting an error on a non-output neuron is probably not what you want to do." );

   fError = error;
}

////////////////////////////////////////////////////////////////////////////////
/// update and adjust the pre-synapses for each neuron (input neuron has no pre-synapse)
/// this method should only be called in batch mode

void TMVA::TNeuron::UpdateSynapsesBatch()
{
   if (IsInputNeuron()) return;

   TSynapse* synapse = NULL;
   TObjArrayIter iter(fLinksIn);
   while (true) {
      synapse = (TSynapse*) iter.Next();
      if (synapse == NULL) break;
      synapse->CalculateDelta();
   }
}

////////////////////////////////////////////////////////////////////////////////
/// update the pre-synapses for each neuron (input neuron has no pre-synapse)
/// this method should only be called in sequential mode

void TMVA::TNeuron::UpdateSynapsesSequential()
{
   if (IsInputNeuron()) return;

   TSynapse* synapse = NULL;
   TObjArrayIter iter(fLinksIn);

   while (true) {
      synapse = (TSynapse*) iter.Next();
      if (synapse == NULL) break;
      synapse->InitDelta();
      synapse->CalculateDelta();
      synapse->AdjustWeight();
   }
}

////////////////////////////////////////////////////////////////////////////////
/// adjust the pre-synapses' weights for each neuron (input neuron has no pre-synapse)
/// this method should only be called in batch mode

void TMVA::TNeuron::AdjustSynapseWeights()
{
   if (IsInputNeuron()) return;

   TSynapse* synapse = NULL;
   TObjArrayIter iter(fLinksIn);

   while (true) {
      synapse = (TSynapse*) iter.Next();
      if (synapse == NULL) break;
      synapse->AdjustWeight();
   }
}

////////////////////////////////////////////////////////////////////////////////
/// initialize the deltas of all pre-synapses (input neuron has no pre-synapse)
/// this method should only be called in batch mode

void TMVA::TNeuron::InitSynapseDeltas()
{
   // an input neuron has no pre-weights to adjust
   if (IsInputNeuron()) return;

   TSynapse* synapse = NULL;
   TObjArrayIter iter(fLinksIn);

   while (true) {
      synapse = (TSynapse*) iter.Next();
      if (synapse == NULL) break;
      synapse->InitDelta();
   }
}
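
// The split between UpdateSynapsesSequential() and the three batch-mode methods
// above suggests the calling pattern sketched below. This is an illustration
// inferred from the comments in this file, not code copied from the TMVA training
// loop; "neurons" stands for some container of the network's non-input neurons.
//
//    // sequential (online) learning: weights are adjusted for every event
//    for (TMVA::TNeuron* n : neurons) n->UpdateSynapsesSequential();
//
//    // batch learning: reset the deltas, accumulate them over the batch,
//    // then adjust the weights once
//    for (TMVA::TNeuron* n : neurons) n->InitSynapseDeltas();
//    // ... for each event in the batch:
//    for (TMVA::TNeuron* n : neurons) n->UpdateSynapsesBatch();
//    // ... once the batch is complete:
//    for (TMVA::TNeuron* n : neurons) n->AdjustSynapseWeights();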

////////////////////////////////////////////////////////////////////////////////
/// print an array of TSynapses, for debugging

void TMVA::TNeuron::PrintLinks(TObjArray* links) const
{
   if (links == NULL) {
      Log() << kDEBUG << "\t\t\t<none>" << Endl;
      return;
   }

   TSynapse* synapse;

   Int_t numLinks = links->GetEntriesFast();
   for (Int_t i = 0; i < numLinks; i++) {
      synapse = (TSynapse*)links->At(i);
      Log() << kDEBUG <<
         "\t\t\tweight: " << synapse->GetWeight()
         << "\t\tw-value: " << synapse->GetWeightedValue()
         << "\t\tw-delta: " << synapse->GetWeightedDelta()
         << "\t\tl-rate: " << synapse->GetLearningRate()
         << Endl;
   }
}

////////////////////////////////////////////////////////////////////////////////
/// print activation equation, for debugging

void TMVA::TNeuron::PrintActivationEqn()
{
   if (fActivation != NULL) Log() << kDEBUG << fActivation->GetExpression() << Endl;
   else Log() << kDEBUG << "<none>" << Endl;
}

////////////////////////////////////////////////////////////////////////////////
/// print message, for debugging

void TMVA::TNeuron::PrintMessage(EMsgType type, TString message)
{
   Log() << type << message << Endl;
}

////////////////////////////////////////////////////////////////////////////////

TMVA::MsgLogger& TMVA::TNeuron::Log() const
{
   TTHREAD_TLS_DECL_ARG2(MsgLogger,logger,"TNeuron",kDEBUG); //! message logger, static to save resources
   return logger;
}