#if __cplusplus > 199711L
#include <atomic>
#endif
ClassImp(TMVA::MethodANNBase);
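////////////////////////////////////////////////////////////////////////////////
/// standard constructor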
TMVA::MethodANNBase::MethodANNBase( const TString& jobName,
                                    Types::EMVA methodType,
                                    const TString& methodTitle,
                                    DataSetInfo& theData,
                                    const TString& theOption )
   : TMVA::MethodBase( jobName, methodType, methodTitle, theData, theOption )
   , fUseRegulator(kFALSE)
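////////////////////////////////////////////////////////////////////////////////
/// construct the Method from the weight file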
TMVA::MethodANNBase::MethodANNBase( Types::EMVA methodType,
                                    DataSetInfo& theData,
                                    const TString& theWeightFile )
   : TMVA::MethodBase( methodType, theData, theWeightFile )
   , fUseRegulator(kFALSE)
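////////////////////////////////////////////////////////////////////////////////
/// define the options (their key words) that can be set in the option string.
/// The options are passed in one colon-separated string when booking the
/// method; an illustrative option string (not taken from this file) would be
///
///    "NCycles=500:HiddenLayers=N,N-1:NeuronType=sigmoid:EstimatorType=CE"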
void TMVA::MethodANNBase::DeclareOptions()
{
   DeclareOptionRef( fNcycles    = 500,       "NCycles",    "Number of training cycles" );
   DeclareOptionRef( fLayerSpec  = "N,N-1",   "HiddenLayers", "Specification of hidden layer architecture" );
   DeclareOptionRef( fNeuronType = "sigmoid", "NeuronType", "Neuron activation function type" );
   DeclareOptionRef( fRandomSeed = 1, "RandomSeed", "Random seed for initial synapse weights (0 means unique seed for each run; default value '1')" );

   DeclareOptionRef( fEstimatorS = "MSE", "EstimatorType",
                     "MSE (Mean Square Estimator) for Gaussian likelihood, or CE (Cross-Entropy) for Bernoulli likelihood" );
   AddPreDefVal( TString("MSE") );
   AddPreDefVal( TString("CE") );

   TActivationChooser aChooser;
   std::vector<TString>* names = aChooser.GetAllActivationNames();
   Int_t nTypes = names->size();
   for (Int_t i = 0; i < nTypes; i++)
      AddPreDefVal(names->at(i));
   delete names;   // the chooser returns a newly allocated vector

   DeclareOptionRef( fNeuronInputType = "sum", "NeuronInputType", "Neuron input function type" );
   TNeuronInputChooser iChooser;
   names = iChooser.GetAllNeuronInputNames();
   nTypes = names->size();
   for (Int_t i = 0; i < nTypes; i++) AddPreDefVal(names->at(i));
   delete names;
}
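////////////////////////////////////////////////////////////////////////////////
/// process the options specified by the user: choose the estimator and
/// translate the layer specification into a concrete network layout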
void TMVA::MethodANNBase::ProcessOptions()
{
   if ( DoRegression() || DoMulticlass() ) fEstimatorS = "MSE";   // MSE is always used for regression and multiclass
   else                                    fEstimatorS = "CE";
   if      (fEstimatorS == "MSE") fEstimator = kMSE;
   else if (fEstimatorS == "CE")  fEstimator = kCE;

   std::vector<Int_t>* layout = ParseLayoutString(fLayerSpec);
   BuildNetwork(layout);
   delete layout;
}
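////////////////////////////////////////////////////////////////////////////////
/// parse the hidden-layer specification string and return the full layout,
/// i.e. the number of neurons per layer including input and output layers.
/// "N" (or "n") in the specification stands for the number of input
/// variables. Worked example (assuming four input variables and ordinary
/// two-class classification): "N,N-1" yields the layout { 4, 4, 3, 1 }.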
std::vector<Int_t>* TMVA::MethodANNBase::ParseLayoutString(TString layerSpec)
{
   std::vector<Int_t>* layout = new std::vector<Int_t>();
   layout->push_back((Int_t)GetNvar());   // input layer: one neuron per variable
   while (layerSpec.Length() > 0) {
      TString sToAdd = "";
      if (layerSpec.First(',') < 0) {
         sToAdd    = layerSpec;
         layerSpec = "";
      }
      else {
         sToAdd    = layerSpec(0, layerSpec.First(','));
         layerSpec = layerSpec(layerSpec.First(',')+1, layerSpec.Length());
      }
      Int_t nNodes = 0;
      if (sToAdd.BeginsWith("n") || sToAdd.BeginsWith("N")) { sToAdd.Remove(0,1); nNodes = GetNvar(); }
      nNodes += atoi(sToAdd);
      layout->push_back(nNodes);
   }
   if      (DoRegression()) layout->push_back( DataInfo().GetNTargets() );  // one output neuron per target
   else if (DoMulticlass()) layout->push_back( DataInfo().GetNClasses() );  // one output neuron per class
   else                     layout->push_back(1);                           // a single output neuron

   int n = 0;
   for (std::vector<Int_t>::iterator it = layout->begin(); it != layout->end(); ++it) {
      n++;
   }

   return layout;
}
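////////////////////////////////////////////////////////////////////////////////
/// initialize ANNBase object; common initialisation called from all constructors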
void TMVA::MethodANNBase::InitANNBase()
{
   fInputCalculator    = NULL;
   fEstimatorHistTrain = NULL;
   fEstimatorHistTest  = NULL;

   // reset monitoring histogram vectors
   fEpochMonHistS.clear();
   fEpochMonHistB.clear();
   fEpochMonHistW.clear();

   fOutputNeurons.clear();

   frgen     = new TRandom3(fRandomSeed);
   fSynapses = new TObjArray();
}
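////////////////////////////////////////////////////////////////////////////////
/// destructor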
TMVA::MethodANNBase::~MethodANNBase()
{
   DeleteNetwork();
}
void TMVA::MethodANNBase::DeleteNetwork()
{
   if (fNetwork != NULL) {
      TObjArray* layer;
      Int_t numLayers = fNetwork->GetEntriesFast();
      for (Int_t i = 0; i < numLayers; i++) {
         layer = (TObjArray*)fNetwork->At(i);
         DeleteNetworkLayer(layer);
      }
      delete fNetwork;
   }

   if (frgen != NULL)            delete frgen;
   if (fActivation != NULL)      delete fActivation;
   if (fOutput != NULL)          delete fOutput;
   if (fIdentity != NULL)        delete fIdentity;
   if (fInputCalculator != NULL) delete fInputCalculator;
   if (fSynapses != NULL)        delete fSynapses;

   fNetwork         = NULL;
   frgen            = NULL;
   fActivation      = NULL;
   fOutput          = NULL;
   fIdentity        = NULL;
   fInputCalculator = NULL;
   fSynapses        = NULL;
}
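////////////////////////////////////////////////////////////////////////////////
/// delete a network layer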
void TMVA::MethodANNBase::DeleteNetworkLayer( TObjArray*& layer )
{
   TNeuron* neuron;
   Int_t numNeurons = layer->GetEntriesFast();
   for (Int_t i = 0; i < numNeurons; i++) {
      neuron = (TNeuron*)layer->At(i);
      neuron->DeletePreLinks();
      delete neuron;
   }
   delete layer;
}
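////////////////////////////////////////////////////////////////////////////////
/// build the network given a layout (number of neurons in each layer)
/// and, optionally, an array of weights to force onto the synapses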
void TMVA::MethodANNBase::BuildNetwork( std::vector<Int_t>* layout, std::vector<Double_t>* weights, Bool_t fromFile )
{
   if      (fEstimatorS == "MSE") fEstimator = kMSE;
   else if (fEstimatorS == "CE")  fEstimator = kCE;
   else Log() << kWARNING << "fEstimator=" << fEstimator << "\tfEstimatorS=" << fEstimatorS << Endl;
   if (fEstimator != kMSE && fEstimator != kCE) Log() << kWARNING << "Estimator type unspecified" << Endl;

   Log() << kHEADER << "Building Network." << Endl;

   // set activation and input functions
   TActivationChooser aChooser;
   fActivation = aChooser.CreateActivation(fNeuronType);
   fIdentity   = aChooser.CreateActivation("linear");
   if      (fEstimator == kMSE) fOutput = aChooser.CreateActivation("linear");
   else if (fEstimator == kCE)  fOutput = aChooser.CreateActivation("sigmoid");
   TNeuronInputChooser iChooser;
   fInputCalculator = iChooser.CreateNeuronInput(fNeuronInputType);

   fNetwork = new TObjArray();
   fRegulatorIdx.clear();
   fRegulators.clear();
   BuildLayers( layout, fromFile );

   // cache the input layer and the output neurons for fast access
   fInputLayer = (TObjArray*)fNetwork->At(0);
   TObjArray* outputLayer = (TObjArray*)fNetwork->At(fNetwork->GetEntriesFast()-1);
   fOutputNeurons.clear();
   for (Int_t i = 0; i < outputLayer->GetEntries(); i++) {
      fOutputNeurons.push_back( (TNeuron*)outputLayer->At(i) );
   }

   if (weights == NULL) InitWeights();
   else                 ForceWeights(weights);
}
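////////////////////////////////////////////////////////////////////////////////
/// build the network layers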
void TMVA::MethodANNBase::BuildLayers( std::vector<Int_t>* layout, Bool_t fromFile )
{
   TObjArray* curLayer;
   TObjArray* prevLayer = NULL;

   Int_t numLayers = layout->size();

   for (Int_t i = 0; i < numLayers; i++) {
      curLayer = new TObjArray();
      BuildLayer(layout->at(i), curLayer, prevLayer, i, numLayers, fromFile);
      prevLayer = curLayer;
      fNetwork->Add(curLayer);
   }

   // cache pointers to all synapses and set up the regulator bookkeeping:
   // one regulator per input neuron and one per hidden layer; each synapse
   // is mapped onto the regulator of the layer it originates from
   for (Int_t i = 0; i < numLayers; i++) {
      TObjArray* layer = (TObjArray*)fNetwork->At(i);
      Int_t numNeurons = layer->GetEntriesFast();
      if (i != 0 && i != numLayers-1) fRegulators.push_back(0.);
      for (Int_t j = 0; j < numNeurons; j++) {
         if (i == 0) fRegulators.push_back(0.);
         TNeuron* neuron = (TNeuron*)layer->At(j);
         Int_t numSynapses = neuron->NumPostLinks();
         for (Int_t k = 0; k < numSynapses; k++) {
            TSynapse* synapse = neuron->PostLinkAt(k);
            fSynapses->Add(synapse);
            fRegulatorIdx.push_back(fRegulators.size()-1);
         }
      }
   }
}
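////////////////////////////////////////////////////////////////////////////////
/// build a single layer with neurons and synapses connecting this
/// layer to the previous layer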
void TMVA::MethodANNBase::BuildLayer( Int_t numNeurons, TObjArray* curLayer,
                                      TObjArray* prevLayer, Int_t layerIndex,
                                      Int_t numLayers, Bool_t fromFile )
{
   TNeuron* neuron;
   for (Int_t j = 0; j < numNeurons; j++) {
      if (fromFile && (layerIndex != numLayers-1) && (j == numNeurons-1)) {
         // when reading from file, the last neuron of each non-output layer
         // is the bias neuron, stored explicitly in the weight file
         neuron = new TNeuron();
         neuron->SetActivationEqn(fIdentity);
         neuron->SetBiasNeuron();
         neuron->ForceValue(1.0);
         curLayer->Add(neuron);
      }
      else {
         neuron = new TNeuron();
         neuron->SetInputCalculator(fInputCalculator);

         // input layer
         if (layerIndex == 0) {
            neuron->SetActivationEqn(fIdentity);
            neuron->SetInputNeuron();
         }
         else {
            // output layer
            if (layerIndex == numLayers-1) {
               neuron->SetOutputNeuron();
               neuron->SetActivationEqn(fOutput);
            }
            // hidden layers
            else neuron->SetActivationEqn(fActivation);
            AddPreLinks(neuron, prevLayer);
         }
         curLayer->Add(neuron);
      }
   }

   // add a bias neuron (except to the output layer), unless it was read from file
   if (!fromFile) {
      if (layerIndex != numLayers-1) {
         neuron = new TNeuron();
         neuron->SetActivationEqn(fIdentity);
         neuron->SetBiasNeuron();
         neuron->ForceValue(1.0);
         curLayer->Add(neuron);
      }
   }
}
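////////////////////////////////////////////////////////////////////////////////
/// add synapses connecting a neuron to its preceding layer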
void TMVA::MethodANNBase::AddPreLinks(TNeuron* neuron, TObjArray* prevLayer)
{
   TSynapse* synapse;
   int numNeurons = prevLayer->GetEntriesFast();
   TNeuron* preNeuron;

   // the network is fully connected: one synapse per neuron of the previous layer
   for (Int_t i = 0; i < numNeurons; i++) {
      preNeuron = (TNeuron*)prevLayer->At(i);
      synapse = new TSynapse();
      synapse->SetPreNeuron(preNeuron);
      synapse->SetPostNeuron(neuron);
      preNeuron->AddPostLink(synapse);
      neuron->AddPreLink(synapse);
   }
}
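////////////////////////////////////////////////////////////////////////////////
/// initialize the synapse weights randomly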
void TMVA::MethodANNBase::InitWeights()
{
   PrintMessage("Initializing weights");

   // init synapse weights, drawn uniformly in (-2, 2)
   TSynapse* synapse;
   Int_t numSynapses = fSynapses->GetEntriesFast();
   for (Int_t i = 0; i < numSynapses; i++) {
      synapse = (TSynapse*)fSynapses->At(i);
      synapse->SetWeight(4.0*frgen->Rndm() - 2.0);
   }
}
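////////////////////////////////////////////////////////////////////////////////
/// force the synapse weights to the values given in the vector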
void TMVA::MethodANNBase::ForceWeights(std::vector<Double_t>* weights)
{
   PrintMessage("Forcing weights");

   TSynapse* synapse;
   Int_t numSynapses = fSynapses->GetEntriesFast();
   for (Int_t i = 0; i < numSynapses; i++) {
      synapse = (TSynapse*)fSynapses->At(i);
      synapse->SetWeight(weights->at(i));
   }
}
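////////////////////////////////////////////////////////////////////////////////
/// force the input values of the input neurons;
/// the variable at index 'ignoreIndex' is set to zero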
void TMVA::MethodANNBase::ForceNetworkInputs( const Event* ev, Int_t ignoreIndex )
{
   Double_t x;
   TNeuron* neuron;

   for (UInt_t j = 0; j < GetNvar(); j++) {
      x = (j != (UInt_t)ignoreIndex) ? ev->GetValue(j) : 0;
      neuron = GetInputNeuron(j);
      neuron->ForceValue(x);
   }
}
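////////////////////////////////////////////////////////////////////////////////
/// calculate input values to each neuron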
void TMVA::MethodANNBase::ForceNetworkCalculations()
{
   TObjArray* curLayer;
   TNeuron* neuron;
   Int_t numLayers = fNetwork->GetEntriesFast();
   Int_t numNeurons;

   for (Int_t i = 0; i < numLayers; i++) {
      curLayer   = (TObjArray*)fNetwork->At(i);
      numNeurons = curLayer->GetEntriesFast();

      for (Int_t j = 0; j < numNeurons; j++) {
         neuron = (TNeuron*) curLayer->At(j);
         neuron->CalculateValue();
         neuron->CalculateActivationValue();
      }
   }
}
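////////////////////////////////////////////////////////////////////////////////
/// print messages; printing is suppressed unless verbose, debug, or force is set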
void TMVA::MethodANNBase::PrintMessage(TString message, Bool_t force) const
{
   if (Verbose() || Debug() || force) Log() << kINFO << message << Endl;
}
void TMVA::MethodANNBase::WaitForKeyboard()
{
   std::string dummy;
   Log() << kINFO << "***Type anything to continue (q to quit): ";
   std::getline(std::cin, dummy);
   if (dummy == "q" || dummy == "Q") {
      PrintMessage( "quit" );
      delete this;
      exit(0);
   }
}
void TMVA::MethodANNBase::PrintNetwork() const
{
   if (!Debug()) return;

   Log() << kINFO << Endl;
   PrintMessage( "Printing network" );
   Log() << kINFO << "-------------------------------------------------------------------" << Endl;

   TObjArray* curLayer;
   Int_t numLayers = fNetwork->GetEntriesFast();

   for (Int_t i = 0; i < numLayers; i++) {

      curLayer = (TObjArray*)fNetwork->At(i);
      Int_t numNeurons = curLayer->GetEntriesFast();

      Log() << kINFO << "Layer #" << i << " (" << numNeurons << " neurons):" << Endl;
      PrintLayer( curLayer );
   }
}
void TMVA::MethodANNBase::PrintLayer(TObjArray* layer) const
{
   Int_t numNeurons = layer->GetEntriesFast();
   TNeuron* neuron;

   for (Int_t j = 0; j < numNeurons; j++) {
      neuron = (TNeuron*) layer->At(j);
      Log() << kINFO << "\tNeuron #" << j << " (LinksIn: " << neuron->NumPreLinks()
            << ", LinksOut: " << neuron->NumPostLinks() << ")" << Endl;
      PrintNeuron( neuron );
   }
}
void TMVA::MethodANNBase::PrintNeuron(TNeuron* neuron) const
{
   Log() << kINFO
         << "\t\tValue:\t"     << neuron->GetValue()
         << "\t\tActivation: " << neuron->GetActivationValue()
         << "\t\tDelta: "      << neuron->GetDelta() << Endl;
   Log() << kINFO << "\t\tActivationEquation:\t";
   neuron->PrintActivationEqn();
   Log() << kINFO << "\t\tLinksIn:" << Endl;
   neuron->PrintPreLinks();
   Log() << kINFO << "\t\tLinksOut:" << Endl;
   neuron->PrintPostLinks();
}
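////////////////////////////////////////////////////////////////////////////////
/// get the mva value generated by the NN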
Double_t TMVA::MethodANNBase::GetMvaValue( Double_t* err, Double_t* errUpper )
{
   TNeuron* neuron;

   TObjArray* inputLayer = (TObjArray*)fNetwork->At(0);

   const Event* ev = GetEvent();

   for (UInt_t i = 0; i < GetNvar(); i++) {
      neuron = (TNeuron*)inputLayer->At(i);
      neuron->ForceValue( ev->GetValue(i) );
   }
   ForceNetworkCalculations();

   // check the output of the network
   TObjArray* outputLayer = (TObjArray*)fNetwork->At( fNetwork->GetEntriesFast()-1 );
   neuron = (TNeuron*)outputLayer->At(0);

   // no error estimate available for the ANN response
   NoErrorCalc(err, errUpper);

   return neuron->GetActivationValue();
}
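////////////////////////////////////////////////////////////////////////////////
/// get the regression value(s) generated by the network; the targets are
/// transformed back into the original range by the transformation handler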
const std::vector<Float_t>& TMVA::MethodANNBase::GetRegressionValues()
{
   TNeuron* neuron;

   TObjArray* inputLayer = (TObjArray*)fNetwork->At(0);

   const Event* ev = GetEvent();

   for (UInt_t i = 0; i < GetNvar(); i++) {
      neuron = (TNeuron*)inputLayer->At(i);
      neuron->ForceValue( ev->GetValue(i) );
   }
   ForceNetworkCalculations();

   // check the output of the network
   TObjArray* outputLayer = (TObjArray*)fNetwork->At( fNetwork->GetEntriesFast()-1 );

   if (fRegressionReturnVal == NULL) fRegressionReturnVal = new std::vector<Float_t>();
   fRegressionReturnVal->clear();

   Event* evT = new Event(*ev);
   UInt_t ntgts = outputLayer->GetEntriesFast();
   for (UInt_t itgt = 0; itgt < ntgts; itgt++) {
      evT->SetTarget(itgt, ((TNeuron*)outputLayer->At(itgt))->GetActivationValue());
   }

   const Event* evT2 = GetTransformationHandler().InverseTransform( evT );
   for (UInt_t itgt = 0; itgt < ntgts; itgt++) {
      fRegressionReturnVal->push_back( evT2->GetTarget(itgt) );
   }

   delete evT;

   return *fRegressionReturnVal;
}
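////////////////////////////////////////////////////////////////////////////////
/// get the multiclass classification values generated by the network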
const std::vector<Float_t>& TMVA::MethodANNBase::GetMulticlassValues()
{
   TNeuron* neuron;

   TObjArray* inputLayer = (TObjArray*)fNetwork->At(0);

   const Event* ev = GetEvent();

   for (UInt_t i = 0; i < GetNvar(); i++) {
      neuron = (TNeuron*)inputLayer->At(i);
      neuron->ForceValue( ev->GetValue(i) );
   }
   ForceNetworkCalculations();

   if (fMulticlassReturnVal == NULL) fMulticlassReturnVal = new std::vector<Float_t>();
   fMulticlassReturnVal->clear();
   std::vector<Float_t> temp;

   UInt_t nClasses = DataInfo().GetNClasses();
   for (UInt_t icls = 0; icls < nClasses; icls++) {
      temp.push_back( GetOutputNeuron( icls )->GetActivationValue() );
   }
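   // turn the raw network outputs into class probabilities with a softmax:
   // 1/(1 + sum_{j != i} exp(x_j - x_i)) is algebraically identical to
   // exp(x_i) / sum_j exp(x_j)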
   for (UInt_t iClass = 0; iClass < nClasses; iClass++) {
      Double_t norm = 0.0;
      for (UInt_t j = 0; j < nClasses; j++) {
         if (j != iClass)
            norm += exp(temp[j] - temp[iClass]);
      }
      (*fMulticlassReturnVal).push_back(1.0/(1.0+norm));
   }

   return *fMulticlassReturnVal;
}
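////////////////////////////////////////////////////////////////////////////////
/// create XML description of ANN classifier. Schematically (an illustration
/// only; attribute values depend on the trained network) the output looks like
///
///    <Weights>
///      <Layout NLayers="3">
///        <Layer Index="0" NNeurons="5">
///          <Neuron NSynapses="4"> 1.25e-01 ... </Neuron>
///          ...
///        </Layer>
///        ...
///      </Layout>
///      <InverseHessian NElements="..." NRows="..." NCols="...">
///        <Row Index="0"> ... </Row>
///      </InverseHessian>
///    </Weights>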
void TMVA::MethodANNBase::AddWeightsXMLTo( void* parent ) const
{
   Int_t numLayers = fNetwork->GetEntriesFast();
   void* wght = gTools().xmlengine().NewChild(parent, 0, "Weights");
   void* xmlLayout = gTools().xmlengine().NewChild(wght, 0, "Layout");
   gTools().xmlengine().NewAttr(xmlLayout, 0, "NLayers", gTools().StringFromInt(fNetwork->GetEntriesFast()) );
   TString weights = "";
   for (Int_t i = 0; i < numLayers; i++) {
      TObjArray* layer = (TObjArray*)fNetwork->At(i);
      Int_t numNeurons = layer->GetEntriesFast();
      void* layerxml = gTools().xmlengine().NewChild(xmlLayout, 0, "Layer");
      gTools().xmlengine().NewAttr(layerxml, 0, "Index",    gTools().StringFromInt(i) );
      gTools().xmlengine().NewAttr(layerxml, 0, "NNeurons", gTools().StringFromInt(numNeurons) );
      for (Int_t j = 0; j < numNeurons; j++) {
         TNeuron* neuron = (TNeuron*)layer->At(j);
         Int_t numSynapses = neuron->NumPostLinks();
         void* neuronxml = gTools().AddChild(layerxml, "Neuron");
         gTools().AddAttr(neuronxml, "NSynapses", gTools().StringFromInt(numSynapses) );
         if (numSynapses == 0) continue;
         std::stringstream s("");
         s.precision( 16 );
         for (Int_t k = 0; k < numSynapses; k++) {
            TSynapse* synapse = neuron->PostLinkAt(k);
            s << std::scientific << synapse->GetWeight() << " ";
         }
         gTools().AddRawLine( neuronxml, s.str().c_str() );
      }
   }

   // if the inverse hessian exists, write it to the weight file
   if ( fInvHessian.GetNcols() > 0 ) {
      void* xmlInvHessian = gTools().xmlengine().NewChild(wght, 0, "InverseHessian");

      // get the matrix dimensions
      Int_t nElements = fInvHessian.GetNoElements();
      Int_t nRows     = fInvHessian.GetNrows();
      Int_t nCols     = fInvHessian.GetNcols();
      gTools().xmlengine().NewAttr(xmlInvHessian, 0, "NElements", gTools().StringFromInt(nElements) );
      gTools().xmlengine().NewAttr(xmlInvHessian, 0, "NRows",     gTools().StringFromInt(nRows) );
      gTools().xmlengine().NewAttr(xmlInvHessian, 0, "NCols",     gTools().StringFromInt(nCols) );

      // read the matrix into a linear array
      Double_t* elements = new Double_t[nElements+10];
      fInvHessian.GetMatrix2Array( elements );

      // write the matrix row-wise
      Int_t index = 0;
      for (Int_t row = 0; row < nRows; ++row) {
         void* xmlRow = gTools().xmlengine().NewChild(xmlInvHessian, 0, "Row");
         gTools().xmlengine().NewAttr(xmlRow, 0, "Index", gTools().StringFromInt(row) );

         std::stringstream s("");
         s.precision( 16 );
         for (Int_t col = 0; col < nCols; ++col) {
            s << std::scientific << (*(elements+index)) << " ";
            ++index;
         }
         gTools().xmlengine().AddRawLine( xmlRow, s.str().c_str() );
      }
      delete[] elements;
   }
}
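////////////////////////////////////////////////////////////////////////////////
/// read MLP from xml weight file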
void TMVA::MethodANNBase::ReadWeightsFromXML( void* wghtnode )
{
   // build the layout first
   Bool_t fromFile = kTRUE;
   std::vector<Int_t>* layout = new std::vector<Int_t>();

   void* xmlLayout = NULL;
   xmlLayout = gTools().GetChild(wghtnode, "Layout");
   if (!xmlLayout)
      xmlLayout = wghtnode;

   UInt_t nLayers;
   gTools().ReadAttr( xmlLayout, "NLayers", nLayers );
   layout->resize( nLayers );

   void* ch = gTools().xmlengine().GetChild(xmlLayout);
   UInt_t index;
   UInt_t nNeurons;
   while (ch) {
      gTools().ReadAttr( ch, "Index",    index    );
      gTools().ReadAttr( ch, "NNeurons", nNeurons );
      layout->at(index) = nNeurons;
      ch = gTools().GetNextChild(ch);
   }

   BuildNetwork( layout, NULL, fromFile );

   // use the 'slow' (exact) tanh when processing old weight files, to
   // guarantee results identical to those obtained at training time
   if (GetTrainingTMVAVersionCode() < TMVA_VERSION(4,2,1) && fActivation->GetExpression().Contains("tanh")) {
      TActivationTanh* act = dynamic_cast<TActivationTanh*>( fActivation );
      if (act) act->SetSlow();
   }

   // fill the weights of the synapses
   UInt_t nSyn;
   Float_t weight;
   ch = gTools().xmlengine().GetChild(xmlLayout);
   UInt_t iLayer = 0;
   while (ch) {  // layers
      TObjArray* layer = (TObjArray*)fNetwork->At(iLayer);
      gTools().ReadAttr( ch, "Index",    index    );
      gTools().ReadAttr( ch, "NNeurons", nNeurons );

      void* nodeN = gTools().GetChild(ch);
      UInt_t iNeuron = 0;
      while (nodeN) {  // neurons
         TNeuron* neuron = (TNeuron*)layer->At(iNeuron);
         gTools().ReadAttr( nodeN, "NSynapses", nSyn );
         if (nSyn > 0) {
            const char* content = gTools().GetContent(nodeN);
            std::stringstream s(content);
            for (UInt_t iSyn = 0; iSyn < nSyn; iSyn++) {  // synapses
               TSynapse* synapse = neuron->PostLinkAt(iSyn);
               s >> weight;
               synapse->SetWeight(weight);
            }
         }
         nodeN = gTools().GetNextChild(nodeN);
         iNeuron++;
      }
      ch = gTools().GetNextChild(ch);
      iLayer++;
   }

   delete layout;

   // read the inverse hessian matrix, if present
   void* xmlInvHessian = NULL;
   xmlInvHessian = gTools().GetChild(wghtnode, "InverseHessian");
   if (!xmlInvHessian)
      return;   // no inverse hessian available

   fUseRegulator = kTRUE;

   Int_t nElements = 0;
   Int_t nRows     = 0;
   Int_t nCols     = 0;
   gTools().ReadAttr( xmlInvHessian, "NElements", nElements );
   gTools().ReadAttr( xmlInvHessian, "NRows",     nRows );
   gTools().ReadAttr( xmlInvHessian, "NCols",     nCols );

   // adjust the matrix dimensions
   fInvHessian.ResizeTo( nRows, nCols );

   // prepare an array to read in the values
   Double_t* elements;
   if (nElements > std::numeric_limits<int>::max()-100) {
      Log() << kFATAL << "you tried to read a Hessian matrix with " << nElements
            << " elements, which is too large; something likely went wrong reading the weight file" << Endl;
      return;
   }
   else {
      elements = new Double_t[nElements+10];
   }

   void* xmlRow = gTools().xmlengine().GetChild(xmlInvHessian);
   Int_t row = 0;
   index = 0;
   while (xmlRow) {  // rows
      gTools().ReadAttr( xmlRow, "Index", row );

      const char* content = gTools().xmlengine().GetNodeContent(xmlRow);

      std::stringstream s(content);
      for (Int_t iCol = 0; iCol < nCols; iCol++) {
         s >> (*(elements+index));
         ++index;
      }
      xmlRow = gTools().xmlengine().GetNext(xmlRow);
      ++row;
   }

   fInvHessian.SetMatrixArray( elements );

   delete[] elements;
}
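////////////////////////////////////////////////////////////////////////////////
/// read the synapse weights from a plain-text stream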
void TMVA::MethodANNBase::ReadWeightsFromStream( std::istream& istr )
{
   TString dummy;
   Double_t weight;

   // synapse weights
   std::vector<Double_t>* weights = new std::vector<Double_t>();
   while (istr >> dummy >> weight) weights->push_back(weight);

   ForceWeights(weights);

   delete weights;
}
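////////////////////////////////////////////////////////////////////////////////
/// compute ranking of input variables from the weights of the first
/// network layer:
///    importance_i = (sum_j w_{ij}^2) * <|x_i|>^2
/// where the w_{ij} are the synapse weights leaving input neuron i, and
/// <|x_i|> is the average magnitude of the variable (or its mean RMS, if larger)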
const TMVA::Ranking* TMVA::MethodANNBase::CreateRanking()
{
   // create the ranking object
   fRanking = new Ranking( GetName(), "Importance" );

   TNeuron*  neuron;
   TSynapse* synapse;
   Double_t  importance, avgVal;
   TString   varName;

   for (UInt_t ivar = 0; ivar < GetNvar(); ivar++) {

      neuron = GetInputNeuron(ivar);
      Int_t numSynapses = neuron->NumPostLinks();
      importance = 0;
      varName = GetInputVar(ivar);

      // figure out average value of variable ivar
      Double_t meanS, meanB, rmsS, rmsB, xmin, xmax;
      Statistics( TMVA::Types::kTraining, varName,
                  meanS, meanB, rmsS, rmsB, xmin, xmax );

      avgVal = (TMath::Abs(meanS) + TMath::Abs(meanB))/2.0;
      double meanrms = (TMath::Abs(rmsS) + TMath::Abs(rmsB))/2.;
      if (avgVal < meanrms) avgVal = meanrms;
      if (IsNormalised()) avgVal = 0.5*(1 + gTools().NormVariable( avgVal, GetXmin( ivar ), GetXmax( ivar )));

      for (Int_t j = 0; j < numSynapses; j++) {
         synapse = neuron->PostLinkAt(j);
         importance += synapse->GetWeight() * synapse->GetWeight();
      }

      importance *= avgVal * avgVal;

      fRanking->AddRank( Rank( varName, importance ) );
   }

   return fRanking;
}
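////////////////////////////////////////////////////////////////////////////////
/// fill a 2D histogram of the synapse weights between each pair of
/// adjacent layers; one histogram per layer transition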
void TMVA::MethodANNBase::CreateWeightMonitoringHists( const TString& bulkname,
                                                       std::vector<TH1*>* hv ) const
{
   TH2F* hist;
   Int_t numLayers = fNetwork->GetEntriesFast();

   for (Int_t i = 0; i < numLayers-1; i++) {

      TObjArray* layer1 = (TObjArray*)fNetwork->At(i);
      TObjArray* layer2 = (TObjArray*)fNetwork->At(i+1);
      Int_t numNeurons1 = layer1->GetEntriesFast();
      Int_t numNeurons2 = layer2->GetEntriesFast();

      TString name = Form("%s%i%i", bulkname.Data(), i, i+1);
      hist = new TH2F(name, name,
                      numNeurons1, 0, numNeurons1, numNeurons2, 0, numNeurons2);

      for (Int_t j = 0; j < numNeurons1; j++) {

         TNeuron* neuron = (TNeuron*)layer1->At(j);
         Int_t numSynapses = neuron->NumPostLinks();

         for (Int_t k = 0; k < numSynapses; k++) {

            TSynapse* synapse = neuron->PostLinkAt(k);
            hist->SetBinContent(j+1, k+1, synapse->GetWeight());
         }
      }

      if (hv) hv->push_back( hist );
      else {
         hist->Write();
         delete hist;
      }
   }
}
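////////////////////////////////////////////////////////////////////////////////
/// write histograms to file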
void TMVA::MethodANNBase::WriteMonitoringHistosToFile() const
{
   PrintMessage(Form("Write special histos to file: %s", BaseDir()->GetPath()), kTRUE);

   if (fEstimatorHistTrain) fEstimatorHistTrain->Write();
   if (fEstimatorHistTest ) fEstimatorHistTest ->Write();

   // histograms containing the magnitude of the synapse weights
   CreateWeightMonitoringHists( "weights_hist" );

   // save epoch-wise monitoring information in a separate directory;
   // an atomic counter keeps the directory name unique where available
#if __cplusplus > 199711L
   static std::atomic<int> epochMonitoringDirectoryNumber{0};
#else
   static int epochMonitoringDirectoryNumber = 0;
#endif
   int epochVal = epochMonitoringDirectoryNumber++;
   TDirectory* epochdir = NULL;
   if (epochVal == 0)
      epochdir = BaseDir()->mkdir( "EpochMonitoring" );
   else
      epochdir = BaseDir()->mkdir( Form("EpochMonitoring_%4d", epochVal) );

   epochdir->cd();
   for (std::vector<TH1*>::const_iterator it = fEpochMonHistS.begin(); it != fEpochMonHistS.end(); ++it) {
      (*it)->Write();
      delete (*it);
   }
   for (std::vector<TH1*>::const_iterator it = fEpochMonHistB.begin(); it != fEpochMonHistB.end(); ++it) {
      (*it)->Write();
      delete (*it);
   }
   for (std::vector<TH1*>::const_iterator it = fEpochMonHistW.begin(); it != fEpochMonHistW.end(); ++it) {
      (*it)->Write();
      delete (*it);
   }
   BaseDir()->cd();
}
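////////////////////////////////////////////////////////////////////////////////
/// write specific classifier response into a standalone C++ class; the
/// generated code has roughly this shape (a sketch for one hidden layer,
/// class name and dimensions purely illustrative):
///
///    double fWeightMatrix0to1[5][4];   // weight matrix from layer 0 to 1
///    double fWeightMatrix1to2[1][5];   // weight matrix from layer 1 to 2
///
///    inline double ClassName::GetMvaValue__( const std::vector<double>& inputValues ) const
///    {
///       // propagate inputValues through the weight matrices, applying
///       // ActivationFnc() on hidden layers and OutputActivationFnc() on
///       // the output layer, and return the single output value
///    }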
void TMVA::MethodANNBase::MakeClassSpecific( std::ostream& fout, const TString& className ) const
{
   Int_t numLayers = fNetwork->GetEntries();

   fout << "   double ActivationFnc(double x) const;" << std::endl;
   fout << "   double OutputActivationFnc(double x) const;" << std::endl;

   int numNodesFrom = -1;
   for (Int_t lIdx = 0; lIdx < numLayers; lIdx++) {
      int numNodesTo = ((TObjArray*)fNetwork->At(lIdx))->GetEntries();
      if (numNodesFrom < 0) { numNodesFrom = numNodesTo; continue; }
      fout << "   double fWeightMatrix" << lIdx-1 << "to" << lIdx
           << "[" << numNodesTo << "][" << numNodesFrom << "];";
      fout << "   // weight matrix from layer " << lIdx-1 << " to " << lIdx << std::endl;
      numNodesFrom = numNodesTo;
   }

   fout << "};" << std::endl;

   fout << "inline void " << className << "::Initialize()" << std::endl;
   fout << "{" << std::endl;
   fout << "   // build network structure" << std::endl;
   for (Int_t i = 0; i < numLayers-1; i++) {
      fout << "   // weight matrix from layer " << i << " to " << i+1 << std::endl;
      TObjArray* layer = (TObjArray*)fNetwork->At(i);
      Int_t numNeurons = layer->GetEntriesFast();
      for (Int_t j = 0; j < numNeurons; j++) {
         TNeuron* neuron = (TNeuron*)layer->At(j);
         Int_t numSynapses = neuron->NumPostLinks();
         for (Int_t k = 0; k < numSynapses; k++) {
            TSynapse* synapse = neuron->PostLinkAt(k);
            fout << "   fWeightMatrix" << i << "to" << i+1 << "[" << k << "][" << j << "] = "
                 << synapse->GetWeight() << ";" << std::endl;
         }
      }
   }

   fout << "}" << std::endl;
   // write the GetMvaValue__ method
   fout << "inline double " << className << "::GetMvaValue__( const std::vector<double>& inputValues ) const" << std::endl;
   fout << "{" << std::endl;
   fout << "   if (inputValues.size() != (unsigned int)" << ((TObjArray*)fNetwork->At(0))->GetEntries() - 1 << ") {" << std::endl;
   fout << "      std::cout << \"Input vector needs to be of size \" << "
        << ((TObjArray*)fNetwork->At(0))->GetEntries() - 1 << " << std::endl;" << std::endl;
   fout << "      return 0;" << std::endl;
   fout << "   }" << std::endl;

   for (Int_t lIdx = 1; lIdx < numLayers; lIdx++) {
      TObjArray* layer = (TObjArray*)fNetwork->At(lIdx);
      int numNodes = layer->GetEntries();
      fout << "   std::array<double, " << numNodes << "> fWeights" << lIdx << " {{}};" << std::endl;
   }
   // the bias nodes of all but the output layer are fixed to 1
   for (Int_t lIdx = 1; lIdx < numLayers - 1; lIdx++) {
      fout << "   fWeights" << lIdx << ".back() = 1.;" << std::endl;
   }

   for (Int_t i = 0; i < numLayers - 1; i++) {
      fout << "   // layer " << i << " to " << i + 1 << std::endl;
      if (i + 1 == numLayers - 1) {
         fout << "   for (int o=0; o<" << ((TObjArray*)fNetwork->At(i + 1))->GetEntries() << "; o++) {" << std::endl;
      }
      else {
         fout << "   for (int o=0; o<" << ((TObjArray*)fNetwork->At(i + 1))->GetEntries() - 1 << "; o++) {" << std::endl;
      }
      if (i == 0) {
         // the first layer reads directly from inputValues
         fout << "      std::array<double, " << ((TObjArray*)fNetwork->At(i))->GetEntries()
              << "> buffer; // no need to initialise" << std::endl;
         fout << "      for (int i = 0; i<" << ((TObjArray*)fNetwork->At(i))->GetEntries() << " - 1; i++) {" << std::endl;
         fout << "         buffer[i] = fWeightMatrix" << i << "to" << i + 1 << "[o][i] * inputValues[i];" << std::endl;
         fout << "      } // loop over i" << std::endl;
         fout << "      buffer.back() = fWeightMatrix" << i << "to" << i + 1 << "[o]["
              << ((TObjArray*)fNetwork->At(i))->GetEntries() - 1 << "];" << std::endl;
      }
      else {
         fout << "      std::array<double, " << ((TObjArray*)fNetwork->At(i))->GetEntries()
              << "> buffer; // no need to initialise" << std::endl;
         fout << "      for (int i=0; i<" << ((TObjArray*)fNetwork->At(i))->GetEntries() << "; i++) {" << std::endl;
         fout << "         buffer[i] = fWeightMatrix" << i << "to" << i + 1 << "[o][i] * fWeights" << i << "[i];" << std::endl;
         fout << "      } // loop over i" << std::endl;
      }
      fout << "      for (int i=0; i<" << ((TObjArray*)fNetwork->At(i))->GetEntries() << "; i++) {" << std::endl;
      if (fNeuronInputType == "sum") {
         fout << "         fWeights" << i + 1 << "[o] += buffer[i];" << std::endl;
      }
      else if (fNeuronInputType == "sqsum") {
         fout << "         fWeights" << i + 1 << "[o] += buffer[i]*buffer[i];" << std::endl;
      }
      else { // "abssum"
         fout << "         fWeights" << i + 1 << "[o] += fabs(buffer[i]);" << std::endl;
      }
      fout << "      } // loop over i" << std::endl;
      fout << "   } // loop over o" << std::endl;

      if (i + 1 == numLayers - 1) {
         fout << "   for (int o=0; o<" << ((TObjArray*)fNetwork->At(i + 1))->GetEntries() << "; o++) {" << std::endl;
      }
      else {
         fout << "   for (int o=0; o<" << ((TObjArray*)fNetwork->At(i + 1))->GetEntries() - 1 << "; o++) {" << std::endl;
      }
      // hidden layers use ActivationFnc, the output layer OutputActivationFnc
      if (i + 1 != numLayers - 1)
         fout << "      fWeights" << i + 1 << "[o] = ActivationFnc(fWeights" << i + 1 << "[o]);" << std::endl;
      else
         fout << "      fWeights" << i + 1 << "[o] = OutputActivationFnc(fWeights" << i + 1 << "[o]);" << std::endl;
      fout << "   } // loop over o" << std::endl;
   }

   fout << "   return fWeights" << numLayers - 1 << "[0];" << std::endl;
   fout << "}" << std::endl;
   // write the activation function implementations
   TString fncName = className + "::ActivationFnc";
   fActivation->MakeFunction(fout, fncName);
   fncName = className + "::OutputActivationFnc";
   fOutput->MakeFunction(fout, fncName);

   fout << "// Clean up" << std::endl;
   fout << "inline void " << className << "::Clear()" << std::endl;
   fout << "{" << std::endl;
   fout << "}" << std::endl;
}
Bool_t TMVA::MethodANNBase::Debug() const
{
   return fgDEBUG;
}