#ifndef ROOT_TMVA_MethodANNBase
#define ROOT_TMVA_MethodANNBase

#include <istream>
#include <vector>

#include "TH1F.h"
#include "TMatrixD.h"
#include "TObjArray.h"
#include "TString.h"

#include "TMVA/MethodBase.h"
#include "TMVA/TActivation.h"
#include "TMVA/TNeuron.h"
#include "TMVA/TNeuronInput.h"

namespace TMVA {
class MethodANNBase : public MethodBase {

public:

   // constructors dictated by subclassing off of MethodBase
   MethodANNBase( const TString& jobName,
                  Types::EMVA methodType,
                  const TString& methodTitle,
                  DataSetInfo& theData,
                  const TString& theOption );

   MethodANNBase( Types::EMVA methodType,
                  DataSetInfo& theData,
                  const TString& theWeightFile );

   virtual ~MethodANNBase();
   // setters that allow derived classes to exchange the activation function
   // and the neuron input calculator; any previously installed object is deleted
   void SetActivation( TActivation* activation ) {
      if (fActivation != nullptr) delete fActivation;
      fActivation = activation;
   }
   void SetNeuronInputCalculator( TNeuronInput* inputCalculator ) {
      if (fInputCalculator != nullptr) delete fInputCalculator;
      fInputCalculator = inputCalculator;
   }
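   // Usage sketch (hypothetical caller; in practice the concrete helpers are
   // normally chosen via the NeuronType / NeuronInputType options). Assumes
   // the TMVA classes TActivationTanh and TNeuronInputSum:
   //
   //    SetActivation( new TActivationTanh() );
   //    SetNeuronInputCalculator( new TNeuronInputSum() );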
   // this will have to be overridden by every subclass
   virtual void Train() = 0;

   // print network, for debugging
   virtual void PrintNetwork() const;

   // collect the activation values of the nodes in a given layer
   template <typename WriteIterator>
   void GetLayerActivation( size_t layer, WriteIterator writeIterator );
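   // Usage sketch: pass any output iterator; the network must already have
   // been evaluated (e.g. via GetRegressionValues()) before reading it out:
   //
   //    std::vector<Float_t> layerValues;
   //    GetLayerActivation( 2, std::back_inserter(layerValues) );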
   using MethodBase::ReadWeightsFromStream;

   // write weights to XML and read them back
   void AddWeightsXMLTo( void* parent ) const;
   void ReadWeightsFromXML( void* wghtnode );

   // read weights from stream
   virtual void ReadWeightsFromStream( std::istream& istr );

   // calculate the MVA value
   virtual Double_t GetMvaValue( Double_t* err = nullptr, Double_t* errUpper = nullptr );

   virtual const std::vector<Float_t>& GetRegressionValues();

   virtual const std::vector<Float_t>& GetMulticlassValues();

   // write method-specific monitoring histograms to the target file
   virtual void WriteMonitoringHistosToFile() const;

   // ranking of input variables
   const Ranking* CreateRanking();

   // option handling
   virtual void DeclareOptions();
   virtual void ProcessOptions();

   Bool_t Debug() const;

   enum EEstimator { kMSE = 0, kCE };
protected:

   // make ROOT-independent C++ class for classifier response
   virtual void MakeClassSpecific( std::ostream&, const TString& ) const;

   // parse the layout specification string and build the network from it
   std::vector<Int_t>* ParseLayoutString( TString layerSpec );
   virtual void BuildNetwork( std::vector<Int_t>* layout, std::vector<Double_t>* weights = nullptr,
                              Bool_t fromFile = kFALSE );
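   // Layout sketch (illustrative values): the specification is a comma-separated
   // list of hidden-layer sizes, where "N" stands for the number of input
   // variables, e.g.
   //
   //    std::vector<Int_t>* layout = ParseLayoutString( "N,N-1" ); // two hidden layers
   //    BuildNetwork( layout );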
   // set the input values of the input-layer neurons from an event
   void ForceNetworkInputs( const Event* ev, Int_t ignoreIndex = -1 );
   Double_t GetNetworkOutput() { return GetOutputNeuron()->GetActivationValue(); }
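   // Evaluation sketch (for orientation only): a response for one event is
   // obtained by loading the event, propagating it through the network and
   // reading the output neuron:
   //
   //    ForceNetworkInputs( ev );
   //    ForceNetworkCalculations();
   //    Double_t response = GetNetworkOutput();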
   // debugging utilities
   void PrintMessage( TString message, Bool_t force = kFALSE ) const;
   void ForceNetworkCalculations();
   void WaitForKeyboard();

   // accessors
   Int_t    NumCycles() { return fNcycles; }
   TNeuron* GetInputNeuron ( Int_t index )     { return (TNeuron*)fInputLayer->At(index); }
   TNeuron* GetOutputNeuron( Int_t index = 0 ) { return fOutputNeurons.at(index); }
   // protected variables
   TObjArray*    fNetwork;          // TObjArray of TObjArrays representing the network layers
   TObjArray*    fSynapses;         // array of pointers to the synapses, no structural data
   TActivation*  fActivation;       // activation function for the hidden layers
   TActivation*  fOutput;           // activation function for the output layer
   TActivation*  fIdentity;         // identity activation function
   TNeuronInput* fInputCalculator;  // input calculator for all neurons

   std::vector<Int_t>    fRegulatorIdx; // index to the regulator prior of every synapse
   std::vector<Double_t> fRegulators;   // the priors used as regulators
   EEstimator            fEstimator;    // estimator used during training (MSE or cross entropy)

   // monitoring histograms
   TH1F* fEstimatorHistTrain; // monitors convergence of the training sample
   TH1F* fEstimatorHistTest;  // monitors convergence of the independent test sample

   // epoch monitoring histograms (not available for regression)
   void CreateWeightMonitoringHists( const TString& bulkname, std::vector<TH1*>* hv = nullptr ) const;
   std::vector<TH1*> fEpochMonHistS; // epoch monitoring histograms for signal
   std::vector<TH1*> fEpochMonHistB; // epoch monitoring histograms for background
   std::vector<TH1*> fEpochMonHistW; // epoch monitoring histograms for weights

   TMatrixD fInvHessian;      // approximate inverse of the Hessian, used for error estimation

   Int_t   fNcycles;          // number of training cycles (epochs)
   TString fNeuronInputType;  // name of the neuron input calculator
private:

   // helper functions for building the network
   void BuildLayers( std::vector<Int_t>* layout, Bool_t from_file = false );
   void BuildLayer ( Int_t numNeurons, TObjArray* curLayer, TObjArray* prevLayer,
                     Int_t layerIndex, Int_t numLayers, Bool_t from_file = false );
   void AddPreLinks( TNeuron* neuron, TObjArray* prevLayer );

   // helper function for forcing the synapse weights
   void ForceWeights( std::vector<Double_t>* weights );

   // helper functions for deleting the network
   void DeleteNetwork();
   void DeleteNetworkLayer( TObjArray*& layer );

   // debugging utilities
   void PrintLayer ( TObjArray* layer  ) const;
   void PrintNeuron( TNeuron*   neuron ) const;

   // private variables
   TObjArray*            fInputLayer;    // cached pointer to the input layer, for fast access
   std::vector<TNeuron*> fOutputNeurons; // cached pointers to the output neurons, for fast access
   // static debug flag
   static const Bool_t fgDEBUG = kTRUE;

   ClassDef(MethodANNBase, 0); // Base class for TMVA ANN implementations
};
template <typename WriteIterator>
inline void MethodANNBase::GetLayerActivation( size_t layerNumber, WriteIterator writeIterator )
{
   // write the activation values of the nodes in layer "layerNumber" into the
   // write iterator; assumes the network has already been evaluated
   if (layerNumber >= (size_t)fNetwork->GetEntriesFast())
      return;

   TObjArray* layer  = (TObjArray*)fNetwork->At(layerNumber);
   UInt_t     nNodes = layer->GetEntriesFast();
   for (UInt_t iNode = 0; iNode < nNodes; iNode++) {
      (*writeIterator) = ((TNeuron*)layer->At(iNode))->GetActivationValue();
      ++writeIterator;
   }
}

} // namespace TMVA

#endif // ROOT_TMVA_MethodANNBase