#include "TSynapse.h"

ClassImp(TMultiLayerPerceptron);

////////////////////////////////////////////////////////////////////////////////
/// Default constructor

TMultiLayerPerceptron::TMultiLayerPerceptron()
{
   if(!TClass::GetClass("TTreePlayer")) gSystem->Load("libTreePlayer");
   fNetwork.SetOwner(true);
   fFirstLayer.SetOwner(false);
   fLastLayer.SetOwner(false);
   fSynapses.SetOwner(true);
   fData = 0;
   fCurrentTree = -1;
   fCurrentTreeWeight = 1;
   fTraining = 0;
   fTrainingOwner = false;
   fTest = 0;
   fTestOwner = false;
   fEventWeight = 0;
   fManager = 0;
   fLearningMethod = TMultiLayerPerceptron::kBFGS;
   fEta = .1;
   fEpsilon = 0;
   fDelta = 0;
   fEtaDecay = 1;
   fTau = 3;
   fLastAlpha = 0;
   fReset = 50;
   fType = TNeuron::kSigmoid;
   fOutType = TNeuron::kLinear;
}
////////////////////////////////////////////////////////////////////////////////
/// The network is described by a simple string:
/// the input/output layers are defined by giving the branch names
/// separated by comas. Hidden layers are just described by the number of
/// neurons.

TMultiLayerPerceptron::TMultiLayerPerceptron(const char * layout, TTree * data,
                                             TEventList * training,
                                             TEventList * test,
                                             TNeuron::ENeuronType type,
                                             const char* extF, const char* extD)
{
   if(!TClass::GetClass("TTreePlayer")) gSystem->Load("libTreePlayer");
   fNetwork.SetOwner(true);
   fFirstLayer.SetOwner(false);
   fLastLayer.SetOwner(false);
   fSynapses.SetOwner(true);
   fStructure = layout;
   fData = data;
   fCurrentTree = -1;
   fCurrentTreeWeight = 1;
   fTraining = training;
   fTrainingOwner = false;
   fTest = test;
   fTestOwner = false;
   fWeight = "1";
   fType = type;
   fOutType = TNeuron::kLinear;
   fextF = extF;
   fextD = extD;
   fEventWeight = 0;
   fManager = 0;
   if (data) {
      BuildNetwork();
      AttachData();
   }
   fLearningMethod = TMultiLayerPerceptron::kBFGS;
   fEta = .1;
   fEpsilon = 0;
   fDelta = 0;
   fEtaDecay = 1;
   fTau = 3;
   fLastAlpha = 0;
   fReset = 50;
}
////////////////////////////////////////////////////////////////////////////////
/// Same as above, with an additional branch name used as event weight.

TMultiLayerPerceptron::TMultiLayerPerceptron(const char * layout,
                                             const char * weight, TTree * data,
                                             TEventList * training,
                                             TEventList * test,
                                             TNeuron::ENeuronType type,
                                             const char* extF, const char* extD)
{
   if(!TClass::GetClass("TTreePlayer")) gSystem->Load("libTreePlayer");
   fNetwork.SetOwner(true);
   fFirstLayer.SetOwner(false);
   fLastLayer.SetOwner(false);
   fSynapses.SetOwner(true);
   fStructure = layout;
   fData = data;
   fCurrentTree = -1;
   fCurrentTreeWeight = 1;
   fTraining = training;
   fTrainingOwner = false;
   fTest = test;
   fTestOwner = false;
   fWeight = weight;
   fType = type;
   fOutType = TNeuron::kLinear;
   fextF = extF;
   fextD = extD;
   fEventWeight = 0;
   fManager = 0;
   if (data) {
      BuildNetwork();
      AttachData();
   }
   fLearningMethod = TMultiLayerPerceptron::kBFGS;
   fEta = .1;
   fEpsilon = 0;
   fDelta = 0;
   fEtaDecay = 1;
   fTau = 3;
   fLastAlpha = 0;
   fReset = 50;
}
////////////////////////////////////////////////////////////////////////////////
/// Same as above, but the training and test datasets are given as cut
/// strings applied to the tree instead of TEventList objects.

TMultiLayerPerceptron::TMultiLayerPerceptron(const char * layout, TTree * data,
                                             const char * training,
                                             const char * test,
                                             TNeuron::ENeuronType type,
                                             const char* extF, const char* extD)
{
   if(!TClass::GetClass("TTreePlayer")) gSystem->Load("libTreePlayer");
   fNetwork.SetOwner(true);
   fFirstLayer.SetOwner(false);
   fLastLayer.SetOwner(false);
   fSynapses.SetOwner(true);
   fStructure = layout;
   fData = data;
   fCurrentTree = -1;
   fCurrentTreeWeight = 1;
   fTraining = new TEventList(Form("fTrainingList_%lu",(ULong_t)this));
   fTrainingOwner = true;
   fTest = new TEventList(Form("fTestList_%lu",(ULong_t)this));
   fTestOwner = true;
   fWeight = "1";
   TString testcut = test;
   if(testcut=="") testcut = Form("!(%s)",training);
   fType = type;
   fOutType = TNeuron::kLinear;
   fextF = extF;
   fextD = extD;
   fEventWeight = 0;
   fManager = 0;
   if (data) {
      BuildNetwork();
      data->Draw(Form(">>fTrainingList_%lu",(ULong_t)this),training,"goff");
      data->Draw(Form(">>fTestList_%lu",(ULong_t)this),(const char *)testcut,"goff");
      AttachData();
   }
   else {
      Warning("TMultiLayerPerceptron::TMultiLayerPerceptron",
              "Data not set. Cannot define datasets");
   }
   fLearningMethod = TMultiLayerPerceptron::kBFGS;
   fEta = .1;
   fEpsilon = 0;
   fDelta = 0;
   fEtaDecay = 1;
   fTau = 3;
   fLastAlpha = 0;
   fReset = 50;
}
////////////////////////////////////////////////////////////////////////////////
/// Same as above, with an event weight branch and cut-string datasets.

TMultiLayerPerceptron::TMultiLayerPerceptron(const char * layout,
                                             const char * weight, TTree * data,
                                             const char * training,
                                             const char * test,
                                             TNeuron::ENeuronType type,
                                             const char* extF, const char* extD)
{
   if(!TClass::GetClass("TTreePlayer")) gSystem->Load("libTreePlayer");
   fNetwork.SetOwner(true);
   fFirstLayer.SetOwner(false);
   fLastLayer.SetOwner(false);
   fSynapses.SetOwner(true);
   fStructure = layout;
   fData = data;
   fCurrentTree = -1;
   fCurrentTreeWeight = 1;
   fTraining = new TEventList(Form("fTrainingList_%lu",(ULong_t)this));
   fTrainingOwner = true;
   fTest = new TEventList(Form("fTestList_%lu",(ULong_t)this));
   fTestOwner = true;
   fWeight = weight;
   TString testcut = test;
   if(testcut=="") testcut = Form("!(%s)",training);
   fType = type;
   fOutType = TNeuron::kLinear;
   fextF = extF;
   fextD = extD;
   fEventWeight = 0;
   fManager = 0;
   if (data) {
      BuildNetwork();
      data->Draw(Form(">>fTrainingList_%lu",(ULong_t)this),training,"goff");
      data->Draw(Form(">>fTestList_%lu",(ULong_t)this),(const char *)testcut,"goff");
      AttachData();
   }
   else {
      Warning("TMultiLayerPerceptron::TMultiLayerPerceptron",
              "Data not set. Cannot define datasets");
   }
   fLearningMethod = TMultiLayerPerceptron::kBFGS;
   fEta = .1;
   fEpsilon = 0;
   fDelta = 0;
   fEtaDecay = 1;
   fTau = 3;
   fLastAlpha = 0;
   fReset = 50;
}
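////////////////////////////////////////////////////////////////////////////////
/// A minimal usage sketch for the cut-string constructor above. The file
/// name, tree name and branch names ("x", "y", "type") are assumptions made
/// for illustration only.
///
/// ~~~ {.cpp}
/// TFile f("data.root");                    // assumed input file
/// TTree *tree = (TTree*) f.Get("tree");    // assumed tree name
/// // 2 inputs, one hidden layer of 10 neurons, 1 output;
/// // odd entries used for training, even entries for testing:
/// TMultiLayerPerceptron mlp("x,y:10:type", tree, "Entry$%2", "(Entry$+1)%2");
/// ~~~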
////////////////////////////////////////////////////////////////////////////////
/// Destructor

TMultiLayerPerceptron::~TMultiLayerPerceptron()
{
   if(fTraining && fTrainingOwner) delete fTraining;
   if(fTest && fTestOwner) delete fTest;
}
////////////////////////////////////////////////////////////////////////////////
/// Set the data source

void TMultiLayerPerceptron::SetData(TTree * data)
{
   if(fData) {
      std::cerr << "Error: data already defined." << std::endl;
      return;
   }
   fData = data;
   if(data) {
      BuildNetwork();
      AttachData();
   }
}

////////////////////////////////////////////////////////////////////////////////
/// Set the event weight

void TMultiLayerPerceptron::SetEventWeight(const char * branch)
{
   fWeight = branch;
   if(fData) {
      if(fEventWeight) {
         fManager->Remove(fEventWeight);
         delete fEventWeight;
      }
      fManager->Add((fEventWeight = new TTreeFormula("NNweight",fWeight.Data(),fData)));
   }
}
////////////////////////////////////////////////////////////////////////////////
/// Sets the Training dataset.
/// Those events will be used for the minimization.

void TMultiLayerPerceptron::SetTrainingDataSet(TEventList* train)
{
   if(fTraining && fTrainingOwner) delete fTraining;
   fTraining = train;
   fTrainingOwner = false;
}

////////////////////////////////////////////////////////////////////////////////
/// Sets the Test dataset.
/// Those events will not be used for the minimization but for control.

void TMultiLayerPerceptron::SetTestDataSet(TEventList* test)
{
   if(fTest && fTestOwner) delete fTest;
   fTest = test;
   fTestOwner = false;
}

////////////////////////////////////////////////////////////////////////////////
/// Sets the Training dataset from a cut string.

void TMultiLayerPerceptron::SetTrainingDataSet(const char * train)
{
   if(fTraining && fTrainingOwner) delete fTraining;
   fTraining = new TEventList(Form("fTrainingList_%lu",(ULong_t)this));
   fTrainingOwner = true;
   if (fData) {
      fData->Draw(Form(">>fTrainingList_%lu",(ULong_t)this),train,"goff");
   }
   else {
      Warning("TMultiLayerPerceptron::TMultiLayerPerceptron",
              "Data not set. Cannot define datasets");
   }
}

////////////////////////////////////////////////////////////////////////////////
/// Sets the Test dataset from a cut string.

void TMultiLayerPerceptron::SetTestDataSet(const char * test)
{
   if(fTest && fTestOwner) { delete fTest; fTest=0; }
   if(fTest) if(strncmp(fTest->GetName(),Form("fTestList_%lu",(ULong_t)this),10)) delete fTest;
   fTest = new TEventList(Form("fTestList_%lu",(ULong_t)this));
   fTestOwner = true;
   if (fData) {
      fData->Draw(Form(">>fTestList_%lu",(ULong_t)this),test,"goff");
   }
   else {
      Warning("TMultiLayerPerceptron::TMultiLayerPerceptron",
              "Data not set. Cannot define datasets");
   }
}
////////////////////////////////////////////////////////////////////////////////
/// Sets the learning method.

void TMultiLayerPerceptron::SetLearningMethod(TMultiLayerPerceptron::ELearningMethod method)
{
   fLearningMethod = method;
}

void TMultiLayerPerceptron::SetEta(Double_t eta)
{
   fEta = eta;
}

void TMultiLayerPerceptron::SetEpsilon(Double_t eps)
{
   fEpsilon = eps;
}

void TMultiLayerPerceptron::SetDelta(Double_t delta)
{
   fDelta = delta;
}

void TMultiLayerPerceptron::SetEtaDecay(Double_t ed)
{
   fEtaDecay = ed;
}

void TMultiLayerPerceptron::SetTau(Double_t tau)
{
   fTau = tau;
}

void TMultiLayerPerceptron::SetReset(Int_t reset)
{
   fReset = reset;
}
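////////////////////////////////////////////////////////////////////////////////
/// A minimal sketch of how the setters above are typically combined before
/// training; the values shown are illustrative assumptions, not defaults
/// prescribed by this file.
///
/// ~~~ {.cpp}
/// mlp.SetLearningMethod(TMultiLayerPerceptron::kStochastic);
/// mlp.SetEta(0.1);       // base learning rate
/// mlp.SetEpsilon(0.0);   // momentum term
/// mlp.SetDelta(0.0);     // flat-spot elimination term
/// mlp.SetEtaDecay(1.0);  // learning-rate decay per epoch
/// mlp.SetReset(50);      // restart CG/BFGS direction every 50 epochs
/// ~~~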
////////////////////////////////////////////////////////////////////////////////
/// Load an entry into the network.

void TMultiLayerPerceptron::GetEntry(Int_t entry) const
{
   if (!fData) return;
   fData->GetEntry(entry);
   if (fData->GetTreeNumber() != fCurrentTree) {
      ((TMultiLayerPerceptron*)this)->fCurrentTree = fData->GetTreeNumber();
      fManager->Notify();
      ((TMultiLayerPerceptron*)this)->fCurrentTreeWeight = fData->GetWeight();
   }
   Int_t nentries = fNetwork.GetEntriesFast();
   for (Int_t i=0;i<nentries;i++) {
      TNeuron *neuron = (TNeuron *)fNetwork.UncheckedAt(i);
      neuron->SetNewEvent();
   }
}
////////////////////////////////////////////////////////////////////////////////
/// Train the network for nEpoch epochs, using the chosen learning method and
/// the options given in the option string.

void TMultiLayerPerceptron::Train(Int_t nEpoch, Option_t * option, Double_t minE)
{
   Int_t i;
   TString opt = option;
   opt.ToLower();
   // Decode options and prepare bookkeeping
   Int_t verbosity = 0;
   Bool_t newCanvas = true;
   Bool_t minE_Train = false;
   Bool_t minE_Test  = false;
   if (opt.Contains("text"))
      verbosity += 1;
   if (opt.Contains("graph"))
      verbosity += 2;
   Int_t displayStepping = 1;
   if (opt.Contains("update=")) {
      TRegexp reg("update=[0-9]*");
      TString out = opt(reg);
      displayStepping = atoi(out.Data() + 7);
   }
   if (opt.Contains("current"))
      newCanvas = false;
   if (opt.Contains("minerrortrain"))
      minE_Train = true;
   if (opt.Contains("minerrortest"))
      minE_Test = true;
   TVirtualPad *canvas = 0;
   TMultiGraph *residual_plot = 0;
   TGraph *train_residual_plot = 0;
   TGraph *test_residual_plot = 0;
   if ((!fData) || (!fTraining) || (!fTest)) {
      Error("Train","Training/Test samples still not defined. Cannot train the neural network");
      return;
   }
   Info("Train","Using %d train and %d test entries.",
        fTraining->GetN(), fTest->GetN());
   // Text and graph output booking
   if (verbosity % 2)
      std::cout << "Training the Neural Network" << std::endl;
   if (verbosity / 2) {
      residual_plot = new TMultiGraph;
      if (newCanvas)
         canvas = new TCanvas("NNtraining", "Neural Net training");
      else {
         canvas = gPad;
         if(!canvas) canvas = new TCanvas("NNtraining", "Neural Net training");
      }
      train_residual_plot = new TGraph(nEpoch);
      test_residual_plot  = new TGraph(nEpoch);
      canvas->SetLeftMargin(0.14);
      train_residual_plot->SetLineColor(4);
      test_residual_plot->SetLineColor(2);
      residual_plot->Add(train_residual_plot);
      residual_plot->Add(test_residual_plot);
      residual_plot->Draw("LA");
      if (residual_plot->GetXaxis()) residual_plot->GetXaxis()->SetTitle("Epoch");
      if (residual_plot->GetYaxis()) residual_plot->GetYaxis()->SetTitle("Error");
   }
   // If the option "+" is not set, the network is initialized randomly
   if (!opt.Contains("+"))
      Randomize();
   // Initialisation of the direction and the buffers
   Int_t els = fNetwork.GetEntriesFast() + fSynapses.GetEntriesFast();
   Double_t *buffer = new Double_t[els];
   Double_t *dir = new Double_t[els];
   for (i = 0; i < els; i++)
      buffer[i] = 0;
   Int_t matrix_size = fLearningMethod==TMultiLayerPerceptron::kBFGS ? els : 1;
   TMatrixD bfgsh(matrix_size, matrix_size);
   TMatrixD gamma(matrix_size, 1);
   TMatrixD delta(matrix_size, 1);
   // Epoch loop. Here is where the training itself takes place.
   Double_t training_E = 1e10;
   Double_t test_E = 1e10;
   for (Int_t iepoch = 0; (iepoch < nEpoch) && (!minE_Train || training_E>minE) && (!minE_Test || test_E>minE) ; iepoch++) {
      switch (fLearningMethod) {
      case TMultiLayerPerceptron::kStochastic:
         {
            MLP_Stochastic(buffer);
            break;
         }
      case TMultiLayerPerceptron::kBatch:
         {
            ComputeDEDw();
            MLP_Batch(buffer);
            break;
         }
      case TMultiLayerPerceptron::kSteepestDescent:
         {
            ComputeDEDw();
            SteepestDir(dir);
            if (LineSearch(dir, buffer))
               MLP_Batch(buffer);
            break;
         }
      case TMultiLayerPerceptron::kRibierePolak:
         {
            ComputeDEDw();
            if (!(iepoch % fReset)) {
               SteepestDir(dir);
            } else {
               Double_t norm = 0;
               Double_t onorm = 0;
               for (i = 0; i < els; i++)
                  onorm += dir[i] * dir[i];
               Double_t prod = 0;
               Int_t idx = 0;
               TNeuron *neuron = 0;
               TSynapse *synapse = 0;
               Int_t nentries = fNetwork.GetEntriesFast();
               for (i=0;i<nentries;i++) {
                  neuron = (TNeuron *) fNetwork.UncheckedAt(i);
                  prod -= dir[idx++] * neuron->GetDEDw();
                  norm += neuron->GetDEDw() * neuron->GetDEDw();
               }
               nentries = fSynapses.GetEntriesFast();
               for (i=0;i<nentries;i++) {
                  synapse = (TSynapse *) fSynapses.UncheckedAt(i);
                  prod -= dir[idx++] * synapse->GetDEDw();
                  norm += synapse->GetDEDw() * synapse->GetDEDw();
               }
               ConjugateGradientsDir(dir, (norm - prod) / onorm);
            }
            if (LineSearch(dir, buffer))
               MLP_Batch(buffer);
            break;
         }
      case TMultiLayerPerceptron::kFletcherReeves:
         {
            ComputeDEDw();
            if (!(iepoch % fReset)) {
               SteepestDir(dir);
            } else {
               Double_t norm = 0;
               Double_t onorm = 0;
               for (i = 0; i < els; i++)
                  onorm += dir[i] * dir[i];
               TNeuron *neuron = 0;
               TSynapse *synapse = 0;
               Int_t nentries = fNetwork.GetEntriesFast();
               for (i=0;i<nentries;i++) {
                  neuron = (TNeuron *) fNetwork.UncheckedAt(i);
                  norm += neuron->GetDEDw() * neuron->GetDEDw();
               }
               nentries = fSynapses.GetEntriesFast();
               for (i=0;i<nentries;i++) {
                  synapse = (TSynapse *) fSynapses.UncheckedAt(i);
                  norm += synapse->GetDEDw() * synapse->GetDEDw();
               }
               ConjugateGradientsDir(dir, norm / onorm);
            }
            if (LineSearch(dir, buffer))
               MLP_Batch(buffer);
            break;
         }
      case TMultiLayerPerceptron::kBFGS:
         {
            SetGammaDelta(gamma, delta, buffer);
            if (!(iepoch % fReset)) {
               SteepestDir(dir);
               bfgsh.UnitMatrix();
            } else {
               if (GetBFGSH(bfgsh, gamma, delta)) {
                  SteepestDir(dir);
                  bfgsh.UnitMatrix();
               } else {
                  BFGSDir(bfgsh, dir);
               }
            }
            if (DerivDir(dir) > 0) {
               SteepestDir(dir);
               bfgsh.UnitMatrix();
            }
            if (LineSearch(dir, buffer)) {
               bfgsh.UnitMatrix();
               SteepestDir(dir);
               if (LineSearch(dir, buffer)) {
                  Error("TMultiLayerPerceptron::Train()","Line search fail");
                  iepoch = nEpoch;
               }
            }
            break;
         }
      }
      // Security: if the learning led to non-numerical values, stop.
      if (TMath::IsNaN(GetError(TMultiLayerPerceptron::kTraining))) {
         Error("TMultiLayerPerceptron::Train()","Stop.");
         iepoch = nEpoch;
      }
      // Process other ROOT events and compute the errors for this epoch
      gSystem->ProcessEvents();
      training_E = TMath::Sqrt(GetError(TMultiLayerPerceptron::kTraining) / fTraining->GetN());
      test_E = TMath::Sqrt(GetError(TMultiLayerPerceptron::kTest) / fTest->GetN());
      // Text output
      if ((verbosity % 2) && ((!(iepoch % displayStepping)) || (iepoch == nEpoch - 1))) {
         std::cout << "Epoch: " << iepoch
                   << " learn=" << training_E
                   << " test=" << test_E
                   << std::endl;
      }
      // Graph output
      if (verbosity / 2) {
         train_residual_plot->SetPoint(iepoch, iepoch,training_E);
         test_residual_plot->SetPoint(iepoch, iepoch,test_E);
         if (!iepoch) {
            // Fill the remaining points with the first value so the graph
            // range is sensible from the start.
            Double_t trp = train_residual_plot->GetY()[iepoch];
            Double_t tep = test_residual_plot->GetY()[iepoch];
            for (i = 1; i < nEpoch; i++) {
               train_residual_plot->SetPoint(i, i, trp);
               test_residual_plot->SetPoint(i, i, tep);
            }
         }
         if ((!(iepoch % displayStepping)) || (iepoch == nEpoch - 1)) {
            if (residual_plot->GetYaxis()) {
               residual_plot->GetYaxis()->UnZoom();
               residual_plot->GetYaxis()->SetTitleOffset(1.4);
               residual_plot->GetYaxis()->SetDecimals();
            }
            canvas->Modified();
            canvas->Update();
         }
      }
   }
   // Cleaning
   delete [] buffer;
   delete [] dir;
   // Final output
   if (verbosity % 2)
      std::cout << "Training done." << std::endl;
   if (verbosity / 2) {
      TLegend *legend = new TLegend(.75, .80, .95, .95);
      legend->AddEntry(residual_plot->GetListOfGraphs()->At(0),
                       "Training sample", "L");
      legend->AddEntry(residual_plot->GetListOfGraphs()->At(1),
                       "Test sample", "L");
      legend->Draw();
   }
}
////////////////////////////////////////////////////////////////////////////////
/// Computes the output for a given event.
/// Look at the output neuron designed by index.

Double_t TMultiLayerPerceptron::Result(Int_t event, Int_t index) const
{
   GetEntry(event);
   TNeuron *out = (TNeuron *) (fLastLayer.At(index));
   if (out)
      return out->GetValue();
   else
      return 0;
}
////////////////////////////////////////////////////////////////////////////////
/// Error on the output for a given event.

Double_t TMultiLayerPerceptron::GetError(Int_t event) const
{
   GetEntry(event);
   Double_t error = 0;
   // The error function depends on the output neuron type.
   Int_t nEntries = fLastLayer.GetEntriesFast();
   if (nEntries == 0) return 0.0;
   switch (fOutType) {
   case (TNeuron::kSigmoid):
      error = GetCrossEntropyBinary();
      break;
   case (TNeuron::kSoftmax):
      error = GetCrossEntropy();
      break;
   case (TNeuron::kLinear):
      error = GetSumSquareError();
      break;
   default:
      error = GetSumSquareError();
   }
   error *= fEventWeight->EvalInstance();
   error *= fCurrentTreeWeight;
   return error;
}
////////////////////////////////////////////////////////////////////////////////
/// Error on the whole dataset.

Double_t TMultiLayerPerceptron::GetError(TMultiLayerPerceptron::EDataSet set) const
{
   TEventList *list =
       ((set == TMultiLayerPerceptron::kTraining) ? fTraining : fTest);
   Double_t error = 0;
   Int_t i;
   if (list) {
      Int_t nEvents = list->GetN();
      for (i = 0; i < nEvents; i++) {
         error += GetError(list->GetEntry(i));
      }
   } else if (fData) {
      Int_t nEvents = (Int_t) fData->GetEntries();
      for (i = 0; i < nEvents; i++) {
         error += GetError(i);
      }
   }
   return error;
}
////////////////////////////////////////////////////////////////////////////////
/// Error on the output for a given event.

Double_t TMultiLayerPerceptron::GetSumSquareError() const
{
   Double_t error = 0;
   for (Int_t i = 0; i < fLastLayer.GetEntriesFast(); i++) {
      TNeuron *neuron = (TNeuron *) fLastLayer[i];
      error += neuron->GetError() * neuron->GetError();
   }
   return (error / 2.);
}
////////////////////////////////////////////////////////////////////////////////
/// Cross entropy error for sigmoid output neurons, for a given event.

Double_t TMultiLayerPerceptron::GetCrossEntropyBinary() const
{
   Double_t error = 0;
   for (Int_t i = 0; i < fLastLayer.GetEntriesFast(); i++) {
      TNeuron *neuron = (TNeuron *) fLastLayer[i];
      Double_t output = neuron->GetValue();
      Double_t target = neuron->GetTarget();
      if (target < DBL_EPSILON) {
         if (output == 1.0)
            error = DBL_MAX;
         else
            error -= TMath::Log(1 - output);
      } else
      if ((1 - target) < DBL_EPSILON) {
         if (output == 0.0)
            error = DBL_MAX;
         else
            error -= TMath::Log(output);
      } else {
         if (output == 0.0 || output == 1.0)
            error = DBL_MAX;
         else
            error -= target * TMath::Log(output / target) + (1-target) * TMath::Log((1 - output)/(1 - target));
      }
   }
   return error;
}
////////////////////////////////////////////////////////////////////////////////
/// Cross entropy error for a softmax output neuron, for a given event.

Double_t TMultiLayerPerceptron::GetCrossEntropy() const
{
   Double_t error = 0;
   for (Int_t i = 0; i < fLastLayer.GetEntriesFast(); i++) {
      TNeuron *neuron = (TNeuron *) fLastLayer[i];
      Double_t output = neuron->GetValue();
      Double_t target = neuron->GetTarget();
      if (target > DBL_EPSILON) {
         if (output == 0.0)
            error = DBL_MAX;
         else
            error -= target * TMath::Log(output / target);
      }
   }
   return error;
}
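////////////////////////////////////////////////////////////////////////////////
/// A note on the two error functions above: with t_i the targets and o_i the
/// outputs, the quantity summed here is the cross entropy written relative to
/// the targets, E = -Sum_i t_i ln(o_i / t_i), which vanishes when o_i = t_i,
/// while GetCrossEntropyBinary() uses the per-neuron binary form
/// E = -Sum_i [ t_i ln(o_i/t_i) + (1-t_i) ln((1-o_i)/(1-t_i)) ].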
////////////////////////////////////////////////////////////////////////////////
/// Compute the DEDw = sum on all training events of dedw for each weight,
/// normalized by the number of events.

void TMultiLayerPerceptron::ComputeDEDw() const
{
   Int_t i,j;
   Int_t nentries = fSynapses.GetEntriesFast();
   TSynapse *synapse;
   for (i=0;i<nentries;i++) {
      synapse = (TSynapse *) fSynapses.UncheckedAt(i);
      synapse->SetDEDw(0.);
   }
   TNeuron *neuron;
   nentries = fNetwork.GetEntriesFast();
   for (i=0;i<nentries;i++) {
      neuron = (TNeuron *) fNetwork.UncheckedAt(i);
      neuron->SetDEDw(0.);
   }
   Double_t eventWeight = 1.;
   if (fTraining) {
      Int_t nEvents = fTraining->GetN();
      for (i = 0; i < nEvents; i++) {
         GetEntry(fTraining->GetEntry(i));
         eventWeight = fEventWeight->EvalInstance();
         eventWeight *= fCurrentTreeWeight;
         nentries = fSynapses.GetEntriesFast();
         for (j=0;j<nentries;j++) {
            synapse = (TSynapse *) fSynapses.UncheckedAt(j);
            synapse->SetDEDw(synapse->GetDEDw() + (synapse->GetDeDw()*eventWeight));
         }
         nentries = fNetwork.GetEntriesFast();
         for (j=0;j<nentries;j++) {
            neuron = (TNeuron *) fNetwork.UncheckedAt(j);
            neuron->SetDEDw(neuron->GetDEDw() + (neuron->GetDeDw()*eventWeight));
         }
      }
      nentries = fSynapses.GetEntriesFast();
      for (j=0;j<nentries;j++) {
         synapse = (TSynapse *) fSynapses.UncheckedAt(j);
         synapse->SetDEDw(synapse->GetDEDw() / (Double_t) nEvents);
      }
      nentries = fNetwork.GetEntriesFast();
      for (j=0;j<nentries;j++) {
         neuron = (TNeuron *) fNetwork.UncheckedAt(j);
         neuron->SetDEDw(neuron->GetDEDw() / (Double_t) nEvents);
      }
   } else if (fData) {
      Int_t nEvents = (Int_t) fData->GetEntries();
      for (i = 0; i < nEvents; i++) {
         GetEntry(i);
         eventWeight = fEventWeight->EvalInstance();
         eventWeight *= fCurrentTreeWeight;
         nentries = fSynapses.GetEntriesFast();
         for (j=0;j<nentries;j++) {
            synapse = (TSynapse *) fSynapses.UncheckedAt(j);
            synapse->SetDEDw(synapse->GetDEDw() + (synapse->GetDeDw()*eventWeight));
         }
         nentries = fNetwork.GetEntriesFast();
         for (j=0;j<nentries;j++) {
            neuron = (TNeuron *) fNetwork.UncheckedAt(j);
            neuron->SetDEDw(neuron->GetDEDw() + (neuron->GetDeDw()*eventWeight));
         }
      }
      nentries = fSynapses.GetEntriesFast();
      for (j=0;j<nentries;j++) {
         synapse = (TSynapse *) fSynapses.UncheckedAt(j);
         synapse->SetDEDw(synapse->GetDEDw() / (Double_t) nEvents);
      }
      nentries = fNetwork.GetEntriesFast();
      for (j=0;j<nentries;j++) {
         neuron = (TNeuron *) fNetwork.UncheckedAt(j);
         neuron->SetDEDw(neuron->GetDEDw() / (Double_t) nEvents);
      }
   }
}
////////////////////////////////////////////////////////////////////////////////
/// Randomize the weights.

void TMultiLayerPerceptron::Randomize() const
{
   Int_t nentries = fSynapses.GetEntriesFast();
   Int_t j;
   TSynapse *synapse;
   TNeuron *neuron;
   TTimeStamp ts;
   TRandom3 gen(ts.GetSec());
   for (j=0;j<nentries;j++) {
      synapse = (TSynapse *) fSynapses.UncheckedAt(j);
      synapse->SetWeight(gen.Rndm() - 0.5);
   }
   nentries = fNetwork.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      neuron = (TNeuron *) fNetwork.UncheckedAt(j);
      neuron->SetWeight(gen.Rndm() - 0.5);
   }
}
////////////////////////////////////////////////////////////////////////////////
/// Connects the TTree to Neurons in input and output
/// layers. The formulas associated to each neuron are created
/// and reported to the network formula manager.

void TMultiLayerPerceptron::AttachData()
{
   Int_t j = 0;
   TNeuron *neuron = 0;
   Bool_t normalize = false;
   fManager = new TTreeFormulaManager;

   // Set the size of the internal array of parameters of the formula
   Int_t maxop, maxpar, maxconst;
   ROOT::v5::TFormula::GetMaxima(maxop, maxpar, maxconst);
   ROOT::v5::TFormula::SetMaxima(10, 10, 10);

   // first layer
   const TString input = TString(fStructure(0, fStructure.First(':')));
   const TObjArray *inpL = input.Tokenize(", ");
   Int_t nentries = fFirstLayer.GetEntriesFast();
   // make sure nentries == entries in inpL
   R__ASSERT(nentries == inpL->GetLast()+1);
   for (j=0;j<nentries;j++) {
      normalize = false;
      const TString brName = ((TObjString *)inpL->At(j))->GetString();
      neuron = (TNeuron *) fFirstLayer.UncheckedAt(j);
      if (brName[0]=='@')
         normalize = true;
      fManager->Add(neuron->UseBranch(fData,brName.Data() + (normalize?1:0)));
      if(!normalize) neuron->SetNormalisation(0., 1.);
   }
   delete inpL;

   // last layer
   TString output = TString(
           fStructure(fStructure.Last(':') + 1,
                      fStructure.Length() - fStructure.Last(':')));
   const TObjArray *outL = output.Tokenize(", ");
   nentries = fLastLayer.GetEntriesFast();
   R__ASSERT(nentries == outL->GetLast()+1);
   for (j=0;j<nentries;j++) {
      normalize = false;
      const TString brName = ((TObjString *)outL->At(j))->GetString();
      neuron = (TNeuron *) fLastLayer.UncheckedAt(j);
      if (brName[0]=='@')
         normalize = true;
      fManager->Add(neuron->UseBranch(fData,brName.Data() + (normalize?1:0)));
      if(!normalize) neuron->SetNormalisation(0., 1.);
   }
   delete outL;

   fManager->Add((fEventWeight = new TTreeFormula("NNweight",fWeight.Data(),fData)));

   // Restore the previous formula limits
   ROOT::v5::TFormula::SetMaxima(maxop, maxpar, maxconst);
}
////////////////////////////////////////////////////////////////////////////////
/// Expand the structure of the first layer: fixed-size arrays used as input
/// branches are replaced by one neuron per array element.

void TMultiLayerPerceptron::ExpandStructure()
{
   TString input = TString(fStructure(0, fStructure.First(':')));
   const TObjArray *inpL = input.Tokenize(", ");
   Int_t nneurons = inpL->GetLast()+1;

   TString hiddenAndOutput = TString(
         fStructure(fStructure.First(':') + 1,
                    fStructure.Length() - fStructure.First(':')));
   TString newInput;
   Int_t i = 0;
   // loop on input neurons
   for (i = 0; i<nneurons; i++) {
      const TString name = ((TObjString *)inpL->At(i))->GetString();
      TTreeFormula f("sizeTestFormula",name,fData);
      // Variable size arrays are unreliable
      if(f.GetMultiplicity()==1 && f.GetNdata()>1) {
         Warning("TMultiLayerPerceptron::ExpandStructure()","Variable size arrays cannot be used to build implicitly an input layer. The index 0 will be assumed.");
         if(i) newInput += ",";
         newInput += name;
         newInput += "[0]";
      }
      // Check if we are coping with an array... then expand
      else if(f.GetNdata()>1) {
         for(Int_t j=0; j<f.GetNdata(); j++) {
            if(i||j) newInput += ",";
            newInput += name;
            newInput += "[";
            newInput += j;
            newInput += "]";
         }
      } else {
         if(i) newInput += ",";
         newInput += name;
      }
   }
   delete inpL;

   // Save the expanded structure
   fStructure = newInput + ":" + hiddenAndOutput;
}
////////////////////////////////////////////////////////////////////////////////
/// Instantiates the network from the description.

void TMultiLayerPerceptron::BuildNetwork()
{
   ExpandStructure();
   TString input = TString(fStructure(0, fStructure.First(':')));
   TString hidden = TString(
           fStructure(fStructure.First(':') + 1,
                      fStructure.Last(':') - fStructure.First(':') - 1));
   TString output = TString(
           fStructure(fStructure.Last(':') + 1,
                      fStructure.Length() - fStructure.Last(':')));
   Int_t bll = atoi(TString(
           hidden(hidden.Last(':') + 1,
                  hidden.Length() - (hidden.Last(':') + 1))).Data());
   if (input.Length() == 0) {
      Error("BuildNetwork()","malformed structure. No input layer.");
      return;
   }
   if (output.Length() == 0) {
      Error("BuildNetwork()","malformed structure. No output layer.");
      return;
   }
   BuildFirstLayer(input);
   BuildHiddenLayers(hidden);
   BuildLastLayer(output, bll);
}
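////////////////////////////////////////////////////////////////////////////////
/// A sketch of the layout strings parsed above; the branch names are
/// illustrative assumptions. The format is "inputs:hidden:...:outputs".
///
/// ~~~ {.cpp}
/// TMultiLayerPerceptron mlp1("x,y:5:3:type", tree);  // two hidden layers, 5 and 3 neurons
/// TMultiLayerPerceptron mlp2("x,y:10:type!", tree);  // '!' selects sigmoid/softmax outputs
///                                                    // (see BuildLastLayer below)
/// ~~~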
////////////////////////////////////////////////////////////////////////////////
/// Instantiates the neurons in input.
/// Inputs are normalised and the type is set to kOff
/// (simple forward of the formula value).

void TMultiLayerPerceptron::BuildFirstLayer(TString & input)
{
   const TObjArray *inpL = input.Tokenize(", ");
   const Int_t nneurons =inpL->GetLast()+1;
   TNeuron *neuron = 0;
   Int_t i = 0;
   for (i = 0; i<nneurons; i++) {
      const TString name = ((TObjString *)inpL->At(i))->GetString();
      neuron = new TNeuron(TNeuron::kOff, name);
      fFirstLayer.AddLast(neuron);
      fNetwork.AddLast(neuron);
   }
   delete inpL;
}
////////////////////////////////////////////////////////////////////////////////
/// Builds hidden layers.

void TMultiLayerPerceptron::BuildHiddenLayers(TString & hidden)
{
   Int_t beg = 0;
   Int_t end = hidden.Index(":", beg + 1);
   Int_t prevStart = 0;
   Int_t prevStop = fNetwork.GetEntriesFast();
   Int_t layer = 1;
   while (end != -1) {
      BuildOneHiddenLayer(hidden(beg, end - beg), layer, prevStart, prevStop, false);
      beg = end + 1;
      end = hidden.Index(":", beg + 1);
   }

   BuildOneHiddenLayer(hidden(beg, hidden.Length() - beg), layer, prevStart, prevStop, true);
}
////////////////////////////////////////////////////////////////////////////////
/// Builds a hidden layer, updates the number of layers.

void TMultiLayerPerceptron::BuildOneHiddenLayer(const TString& sNumNodes, Int_t& layer,
                                                Int_t& prevStart, Int_t& prevStop,
                                                Bool_t lastLayer)
{
   TNeuron *neuron = 0;
   TSynapse *synapse = 0;
   TString name;
   if (!sNumNodes.IsAlnum() || sNumNodes.IsAlpha()) {
      Error("BuildOneHiddenLayer",
            "The specification '%s' for hidden layer %d must contain only numbers!",
            sNumNodes.Data(), layer - 1);
   } else {
      Int_t num = atoi(sNumNodes.Data());
      for (Int_t i = 0; i < num; i++) {
         name.Form("HiddenL%d:N%d",layer,i);
         neuron = new TNeuron(fType, name, "", (const char*)fextF, (const char*)fextD);
         fNetwork.AddLast(neuron);
         for (Int_t j = prevStart; j < prevStop; j++) {
            synapse = new TSynapse((TNeuron *) fNetwork[j], neuron);
            fSynapses.AddLast(synapse);
         }
      }

      if (!lastLayer) {
         // tell each neuron which ones are in its own layer (needed for Softmax)
         Int_t nEntries = fNetwork.GetEntriesFast();
         for (Int_t i = prevStop; i < nEntries; i++) {
            neuron = (TNeuron *) fNetwork[i];
            for (Int_t j = prevStop; j < nEntries; j++)
               neuron->AddInLayer((TNeuron *) fNetwork[j]);
         }
      }

      prevStart = prevStop;
      prevStop = fNetwork.GetEntriesFast();
      layer++;
   }
}
////////////////////////////////////////////////////////////////////////////////
/// Builds the output layer.
/// Neurons are linear combinations of input, by default.
/// If the structure ends with "!", neurons are set up for
/// classification, i.e. with a sigmoid (1 neuron) or softmax (more neurons)
/// activation function.

void TMultiLayerPerceptron::BuildLastLayer(TString & output, Int_t prev)
{
   Int_t nneurons = output.CountChar(',')+1;
   if (fStructure.EndsWith("!")) {
      fStructure = TString(fStructure(0, fStructure.Length() - 1));  // remove "!"
      if (nneurons == 1)
         fOutType = TNeuron::kSigmoid;
      else
         fOutType = TNeuron::kSoftmax;
   }
   Int_t prevStop = fNetwork.GetEntriesFast();
   Int_t prevStart = prevStop - prev;
   Ssiz_t pos = 0;
   TNeuron *neuron;
   TSynapse *synapse;
   TString name;
   Int_t i,j;
   for (i = 0; i<nneurons; i++) {
      Ssiz_t nextpos=output.Index(",",pos);
      if (nextpos!=kNPOS)
         name=output(pos,nextpos-pos);
      else name=output(pos,output.Length());
      pos = nextpos + 1;
      neuron = new TNeuron(fOutType, name);
      for (j = prevStart; j < prevStop; j++) {
         synapse = new TSynapse((TNeuron *) fNetwork[j], neuron);
         fSynapses.AddLast(synapse);
      }
      fLastLayer.AddLast(neuron);
      fNetwork.AddLast(neuron);
   }
   // tell each neuron which ones are in its own layer (needed for Softmax)
   Int_t nEntries = fNetwork.GetEntriesFast();
   for (i = prevStop; i < nEntries; i++) {
      neuron = (TNeuron *) fNetwork[i];
      for (j = prevStop; j < nEntries; j++)
         neuron->AddInLayer((TNeuron *) fNetwork[j]);
   }
}
////////////////////////////////////////////////////////////////////////////////
/// Draws the neural net output.
/// It produces an histogram with the output for the two datasets.
/// Options are "train" and "test"; "comp" produces a X-Y comparison of the
/// NN output against the true value.

void TMultiLayerPerceptron::DrawResult(Int_t index, Option_t * option) const
{
   TString opt = option;
   opt.ToLower();
   TNeuron *out = (TNeuron *) (fLastLayer.At(index));
   if (!out) {
      Error("DrawResult()","no such output.");
      return;
   }
   if (!opt.Contains("nocanv"))
      new TCanvas("NNresult", "Neural Net output");
   const Double_t *norm = out->GetNormalisation();
   TEventList *events = 0;
   TString setname;
   Int_t i;
   if (opt.Contains("train")) {
      events = fTraining;
      setname = Form("train%d",index);
   } else if (opt.Contains("test")) {
      events = fTest;
      setname = Form("test%d",index);
   }
   if ((!fData) || (!events)) {
      Error("DrawResult()","no dataset.");
      return;
   }
   if (opt.Contains("comp")) {
      // comparison plot
      TString title = "Neural Net Output control. ";
      title += setname;
      setname = "MLP_" + setname + "_comp";
      TH2D *hist = ((TH2D *) gDirectory->Get(setname.Data()));
      if (!hist)
         hist = new TH2D(setname.Data(), title.Data(), 50, -1, 1, 50, -1, 1);
      hist->Reset();
      Int_t nEvents = events->GetN();
      for (i = 0; i < nEvents; i++) {
         GetEntry(events->GetEntry(i));
         hist->Fill(out->GetValue(), (out->GetBranch() - norm[1]) / norm[0]);
      }
      hist->Draw();
   } else {
      // output plot
      TString title = "Neural Net Output. ";
      title += setname;
      setname = "MLP_" + setname;
      TH1D *hist = ((TH1D *) gDirectory->Get(setname.Data()));
      if (!hist)
         hist = new TH1D(setname, title, 50, 1, -1);
      hist->Reset();
      Int_t nEvents = events->GetN();
      for (i = 0; i < nEvents; i++)
         hist->Fill(Result(events->GetEntry(i), index));
      hist->Draw();
      if (opt.Contains("train") && opt.Contains("test")) {
         events = fTest;
         setname = "MLP_test";
         hist = ((TH1D *) gDirectory->Get("MLP_test"));
         if (!hist)
            hist = new TH1D(setname, title, 50, 1, -1);
         hist->Reset();
         nEvents = events->GetN();
         for (i = 0; i < nEvents; i++)
            hist->Fill(Result(events->GetEntry(i), index));
         hist->Draw("same");
      }
   }
}
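////////////////////////////////////////////////////////////////////////////////
/// A minimal sketch of typical DrawResult() calls after training, using the
/// option keywords handled above.
///
/// ~~~ {.cpp}
/// mlp.DrawResult(0, "test");       // histogram of output 0 on the test set
/// mlp.DrawResult(0, "test,comp");  // network output vs. true branch value
/// ~~~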
////////////////////////////////////////////////////////////////////////////////
/// Dumps the weights to a text file.
/// Set filename to "-" (default) to dump to the standard output.

Bool_t TMultiLayerPerceptron::DumpWeights(Option_t * filename) const
{
   TString filen = filename;
   std::ostream * output;
   if (filen == "") {
      Error("TMultiLayerPerceptron::DumpWeights()","Invalid file name");
      return kFALSE;
   }
   if (filen == "-")
      output = &std::cout;
   else
      output = new std::ofstream(filen.Data());
   TNeuron *neuron = 0;
   *output << "#input normalization" << std::endl;
   Int_t nentries = fFirstLayer.GetEntriesFast();
   Int_t j=0;
   for (j=0;j<nentries;j++) {
      neuron = (TNeuron *) fFirstLayer.UncheckedAt(j);
      *output << neuron->GetNormalisation()[0] << " "
              << neuron->GetNormalisation()[1] << std::endl;
   }
   *output << "#output normalization" << std::endl;
   nentries = fLastLayer.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      neuron = (TNeuron *) fLastLayer.UncheckedAt(j);
      *output << neuron->GetNormalisation()[0] << " "
              << neuron->GetNormalisation()[1] << std::endl;
   }
   *output << "#neurons weights" << std::endl;
   TObjArrayIter *it = (TObjArrayIter *) fNetwork.MakeIterator();
   while ((neuron = (TNeuron *) it->Next()))
      *output << neuron->GetWeight() << std::endl;
   delete it;
   it = (TObjArrayIter *) fSynapses.MakeIterator();
   TSynapse *synapse = 0;
   *output << "#synapses weights" << std::endl;
   while ((synapse = (TSynapse *) it->Next()))
      *output << synapse->GetWeight() << std::endl;
   delete it;
   if (filen != "-") {
      ((std::ofstream *) output)->close();
      delete output;
   }
   return kTRUE;
}
////////////////////////////////////////////////////////////////////////////////
/// Loads the weights from a text file conforming to the format
/// defined by DumpWeights.

Bool_t TMultiLayerPerceptron::LoadWeights(Option_t * filename)
{
   TString filen = filename;
   Double_t w;
   if (filen == "") {
      Error("TMultiLayerPerceptron::LoadWeights()","Invalid file name");
      return kFALSE;
   }
   char *buff = new char[100];
   std::ifstream input(filen.Data());
   // input normalization
   input.getline(buff, 100);
   TObjArrayIter *it = (TObjArrayIter *) fFirstLayer.MakeIterator();
   Float_t n1,n2;
   TNeuron *neuron = 0;
   while ((neuron = (TNeuron *) it->Next())) {
      input >> n1 >> n2;
      neuron->SetNormalisation(n2,n1);
   }
   input.getline(buff, 100);
   // output normalization
   input.getline(buff, 100);
   delete it;
   it = (TObjArrayIter *) fLastLayer.MakeIterator();
   while ((neuron = (TNeuron *) it->Next())) {
      input >> n1 >> n2;
      neuron->SetNormalisation(n2,n1);
   }
   input.getline(buff, 100);
   // neuron weights
   input.getline(buff, 100);
   delete it;
   it = (TObjArrayIter *) fNetwork.MakeIterator();
   while ((neuron = (TNeuron *) it->Next())) {
      input >> w;
      neuron->SetWeight(w);
   }
   delete it;
   input.getline(buff, 100);
   // synapse weights
   input.getline(buff, 100);
   it = (TObjArrayIter *) fSynapses.MakeIterator();
   TSynapse *synapse = 0;
   while ((synapse = (TSynapse *) it->Next())) {
      input >> w;
      synapse->SetWeight(w);
   }
   delete it;
   delete[] buff;
   return kTRUE;
}
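////////////////////////////////////////////////////////////////////////////////
/// DumpWeights() and LoadWeights() above form a simple text round trip; the
/// file name in this sketch is an illustrative assumption.
///
/// ~~~ {.cpp}
/// mlp.DumpWeights("weights.txt");  // "-" would dump to standard output
/// // ... later, on a network built with the same layout:
/// mlp.LoadWeights("weights.txt");
/// ~~~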
////////////////////////////////////////////////////////////////////////////////
/// Returns the Neural Net for a given set of input parameters
/// #parameters must equal #input neurons.

Double_t TMultiLayerPerceptron::Evaluate(Int_t index, Double_t *params) const
{
   TObjArrayIter *it = (TObjArrayIter *) fNetwork.MakeIterator();
   TNeuron *neuron;
   while ((neuron = (TNeuron *) it->Next()))
      neuron->SetNewEvent();
   delete it;
   it = (TObjArrayIter *) fFirstLayer.MakeIterator();
   Int_t i=0;
   while ((neuron = (TNeuron *) it->Next()))
      neuron->ForceExternalValue(params[i++]);
   delete it;
   TNeuron *out = (TNeuron *) (fLastLayer.At(index));
   if (out)
      return out->GetValue();
   else
      return 0;
}
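////////////////////////////////////////////////////////////////////////////////
/// Evaluate() feeds raw values directly into the first layer, bypassing the
/// TTree; a minimal sketch for a network with two input neurons:
///
/// ~~~ {.cpp}
/// Double_t params[2] = {1.2, -0.5};             // one value per input neuron
/// Double_t nnOutput = mlp.Evaluate(0, params);  // value of output neuron 0
/// ~~~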
////////////////////////////////////////////////////////////////////////////////
/// Exports the NN as a function for any non-ROOT-dependant code
/// Supported languages are: only C++ , FORTRAN and Python (yet)
/// This feature is also useful if you want to plot the NN as
/// a function (TF1 or TF2).

void TMultiLayerPerceptron::Export(Option_t * filename, Option_t * language) const
{
   TString lg = language;
   lg.ToUpper();
   Int_t i;
   if(GetType()==TNeuron::kExternal) {
      Warning("TMultiLayerPerceptron::Export","Request to export a network using an external function");
   }
   if (lg == "C++") {
      TString basefilename = filename;
      Int_t slash = basefilename.Last('/')+1;
      if (slash) basefilename = TString(basefilename(slash, basefilename.Length()-slash));

      TString classname = basefilename;
      TString header = filename;
      header += ".h";
      TString source = filename;
      source += ".cxx";
      std::ofstream headerfile(header);
      std::ofstream sourcefile(source);
      headerfile << "#ifndef " << basefilename << "_h" << std::endl;
      headerfile << "#define " << basefilename << "_h" << std::endl << std::endl;
      headerfile << "class " << classname << " { " << std::endl;
      headerfile << "public:" << std::endl;
      headerfile << "   " << classname << "() {}" << std::endl;
      headerfile << "   ~" << classname << "() {}" << std::endl;
      sourcefile << "#include \"" << header << "\"" << std::endl;
      sourcefile << "#include <cmath>" << std::endl << std::endl;
      headerfile << "   double Value(int index";
      sourcefile << "double " << classname << "::Value(int index";
      for (i = 0; i < fFirstLayer.GetEntriesFast(); i++) {
         headerfile << ",double in" << i;
         sourcefile << ",double in" << i;
      }
      headerfile << ");" << std::endl;
      sourcefile << ") {" << std::endl;
      for (i = 0; i < fFirstLayer.GetEntriesFast(); i++)
         sourcefile << "   input" << i << " = (in" << i << " - "
             << ((TNeuron *) fFirstLayer[i])->GetNormalisation()[1] << ")/"
             << ((TNeuron *) fFirstLayer[i])->GetNormalisation()[0] << ";"
             << std::endl;
      sourcefile << "   switch(index) {" << std::endl;
      TNeuron *neuron;
      TObjArrayIter *it = (TObjArrayIter *) fLastLayer.MakeIterator();
      Int_t idx = 0;
      while ((neuron = (TNeuron *) it->Next()))
         sourcefile << "     case " << idx++ << ":" << std::endl
                    << "         return neuron" << neuron << "();" << std::endl;
      sourcefile << "     default:" << std::endl
                 << "         return 0.;" << std::endl << "   }"
                 << std::endl;
      sourcefile << "}" << std::endl << std::endl;
      headerfile << "   double Value(int index, double* input);" << std::endl;
      sourcefile << "double " << classname << "::Value(int index, double* input) {" << std::endl;
      for (i = 0; i < fFirstLayer.GetEntriesFast(); i++)
         sourcefile << "   input" << i << " = (input[" << i << "] - "
             << ((TNeuron *) fFirstLayer[i])->GetNormalisation()[1] << ")/"
             << ((TNeuron *) fFirstLayer[i])->GetNormalisation()[0] << ";"
             << std::endl;
      sourcefile << "   switch(index) {" << std::endl;
      delete it;
      it = (TObjArrayIter *) fLastLayer.MakeIterator();
      idx = 0;
      while ((neuron = (TNeuron *) it->Next()))
         sourcefile << "     case " << idx++ << ":" << std::endl
                    << "         return neuron" << neuron << "();" << std::endl;
      sourcefile << "     default:" << std::endl
                 << "         return 0.;" << std::endl << "   }"
                 << std::endl;
      sourcefile << "}" << std::endl << std::endl;
      headerfile << "private:" << std::endl;
      for (i = 0; i < fFirstLayer.GetEntriesFast(); i++)
         headerfile << "   double input" << i << ";" << std::endl;
      delete it;
      it = (TObjArrayIter *) fNetwork.MakeIterator();
      idx = 0;
      while ((neuron = (TNeuron *) it->Next())) {
         if (!neuron->GetPre(0)) {
            headerfile << "   double neuron" << neuron << "();" << std::endl;
            sourcefile << "double " << classname << "::neuron" << neuron
                       << "() {" << std::endl;
            sourcefile << "   return input" << idx++ << ";" << std::endl;
            sourcefile << "}" << std::endl << std::endl;
         } else {
            headerfile << "   double input" << neuron << "();" << std::endl;
            sourcefile << "double " << classname << "::input" << neuron
                       << "() {" << std::endl;
            sourcefile << "   double input = " << neuron->GetWeight()
                       << ";" << std::endl;
            TSynapse *syn = 0;
            Int_t n = 0;
            while ((syn = neuron->GetPre(n++))) {
               sourcefile << "   input += synapse" << syn << "();" << std::endl;
            }
            sourcefile << "   return input;" << std::endl;
            sourcefile << "}" << std::endl << std::endl;

            headerfile << "   double neuron" << neuron << "();" << std::endl;
            sourcefile << "double " << classname << "::neuron" << neuron << "() {" << std::endl;
            sourcefile << "   double input = input" << neuron << "();" << std::endl;
            switch(neuron->GetType()) {
            case (TNeuron::kSigmoid):
               {
                  sourcefile << "   return ((input < -709. ? 0. : (1/(1+exp(-input)))) * ";
                  break;
               }
            case (TNeuron::kLinear):
               {
                  sourcefile << "   return (input * ";
                  break;
               }
            case (TNeuron::kTanh):
               {
                  sourcefile << "   return (tanh(input) * ";
                  break;
               }
            case (TNeuron::kGauss):
               {
                  sourcefile << "   return (exp(-input*input) * ";
                  break;
               }
            case (TNeuron::kSoftmax):
               {
                  sourcefile << "   return (exp(input) / (";
                  Int_t nn = 0;
                  TNeuron* side = neuron->GetInLayer(nn++);
                  sourcefile << "exp(input" << side << "())";
                  while ((side = neuron->GetInLayer(nn++)))
                     sourcefile << " + exp(input" << side << "())";
                  sourcefile << ") * ";
                  break;
               }
            default:
               {
                  sourcefile << "   return (0.0 * ";
               }
            }
            sourcefile << neuron->GetNormalisation()[0] << ")+" ;
            sourcefile << neuron->GetNormalisation()[1] << ";" << std::endl;
            sourcefile << "}" << std::endl << std::endl;
         }
      }
      delete it;
      TSynapse *synapse = 0;
      it = (TObjArrayIter *) fSynapses.MakeIterator();
      while ((synapse = (TSynapse *) it->Next())) {
         headerfile << "   double synapse" << synapse << "();" << std::endl;
         sourcefile << "double " << classname << "::synapse"
                    << synapse << "() {" << std::endl;
         sourcefile << "   return (neuron" << synapse->GetPre()
                    << "()*" << synapse->GetWeight() << ");" << std::endl;
         sourcefile << "}" << std::endl << std::endl;
      }
      delete it;
      headerfile << "};" << std::endl << std::endl;
      headerfile << "#endif // " << basefilename << "_h" << std::endl << std::endl;
      headerfile.close();
      sourcefile.close();
      std::cout << header << " and " << source << " created." << std::endl;
   }
   else if(lg == "FORTRAN") {
      TString implicit = "      implicit double precision (a-h,n-z)\n";
      std::ofstream sigmoid("sigmoid.f");
      sigmoid << "      double precision FUNCTION SIGMOID(X)" << std::endl
              << implicit
              << "      IF(X.GT.37.) THEN"                    << std::endl
              << "         SIGMOID = 1."                      << std::endl
              << "      ELSE IF(X.LT.-709.) THEN"             << std::endl
              << "         SIGMOID = 0."                      << std::endl
              << "      ELSE"                                 << std::endl
              << "         SIGMOID = 1./(1.+EXP(-X))"         << std::endl
              << "      ENDIF"                                << std::endl
              << "      END"                                  << std::endl;
      sigmoid.close();
      TString source = filename;
      source += ".f";
      std::ofstream sourcefile(source);

      // Header
      sourcefile << "      double precision function " << filename
                 << "(x, index)" << std::endl;
      sourcefile << implicit;
      sourcefile << "      double precision x(" <<
      fFirstLayer.GetEntriesFast() << ")" << std::endl << std::endl;

      // Last layer
      sourcefile << "C --- Last Layer" << std::endl;
      TNeuron *neuron;
      TObjArrayIter *it = (TObjArrayIter *) fLastLayer.MakeIterator();
      Int_t idx = 0;
      TString ifelseif = "      if (index.eq.";
      while ((neuron = (TNeuron *) it->Next())) {
         sourcefile << ifelseif.Data() << idx++ << ") then" << std::endl
                    << "          " << filename
                    << "=neuron" << neuron << "(x);" << std::endl;
         ifelseif = "      else if (index.eq.";
      }
      sourcefile << "      else" << std::endl
                 << "          " << filename << "=0.d0" << std::endl
                 << "      endif" << std::endl;
      sourcefile << "      end" << std::endl;

      // Network
      sourcefile << "C --- First and Hidden layers" << std::endl;
      delete it;
      it = (TObjArrayIter *) fNetwork.MakeIterator();
      idx = 0;
      while ((neuron = (TNeuron *) it->Next())) {
         sourcefile << "      double precision function neuron"
                    << neuron << "(x)" << std::endl
                    << implicit;
         sourcefile << "      double precision x("
                    << fFirstLayer.GetEntriesFast() << ")" << std::endl << std::endl;
         if (!neuron->GetPre(0)) {
            sourcefile << "      neuron" << neuron
                       << " = (x(" << idx+1 << ") - "
                       << ((TNeuron *) fFirstLayer[idx])->GetNormalisation()[1]
                       << "d0)/"
                       << ((TNeuron *) fFirstLayer[idx])->GetNormalisation()[0]
                       << "d0" << std::endl;
            idx++;
         } else {
            sourcefile << "      neuron" << neuron
                       << " = " << neuron->GetWeight() << "d0" << std::endl;
            TSynapse *syn;
            Int_t n = 0;
            while ((syn = neuron->GetPre(n++)))
               sourcefile << "      neuron" << neuron
                          << " = neuron" << neuron
                          << " + synapse" << syn << "(x)" << std::endl;
            switch(neuron->GetType()) {
            case (TNeuron::kSigmoid):
               {
                  sourcefile << "      neuron" << neuron
                             << "= (sigmoid(neuron" << neuron << ")*";
                  break;
               }
            case (TNeuron::kLinear):
               {
                  break;
               }
            case (TNeuron::kTanh):
               {
                  sourcefile << "      neuron" << neuron
                             << "= (tanh(neuron" << neuron << ")*";
                  break;
               }
            case (TNeuron::kGauss):
               {
                  sourcefile << "      neuron" << neuron
                             << "= (exp(-neuron" << neuron << "*neuron"
                             << neuron << "))*";
                  break;
               }
            case (TNeuron::kSoftmax):
               {
                  Int_t nn = 0;
                  TNeuron* side = neuron->GetInLayer(nn++);
                  sourcefile << "      div = exp(neuron" << side << "())" << std::endl;
                  while ((side = neuron->GetInLayer(nn++)))
                     sourcefile << "      div = div + exp(neuron" << side << "())" << std::endl;
                  sourcefile << "      neuron"  << neuron ;
                  sourcefile << "= (exp(neuron" << neuron << ") / div * ";
                  break;
               }
            default:
               {
                  sourcefile << "   neuron " << neuron << "= 0.";
               }
            }
            sourcefile << neuron->GetNormalisation()[0] << "d0)+" ;
            sourcefile << neuron->GetNormalisation()[1] << "d0" << std::endl;
         }
         sourcefile << "      end" << std::endl;
      }
      delete it;

      // Synapses
      sourcefile << "C --- Synapses" << std::endl;
      TSynapse *synapse = 0;
      it = (TObjArrayIter *) fSynapses.MakeIterator();
      while ((synapse = (TSynapse *) it->Next())) {
         sourcefile << "      double precision function " << "synapse"
                    << synapse << "(x)\n" << implicit;
         sourcefile << "      double precision x("
                    << fFirstLayer.GetEntriesFast() << ")" << std::endl << std::endl;
         sourcefile << "      synapse" << synapse
                    << "=neuron" << synapse->GetPre()
                    << "(x)*" << synapse->GetWeight() << "d0" << std::endl;
         sourcefile << "      end" << std::endl << std::endl;
      }
      delete it;
      sourcefile.close();
      std::cout << source << " created." << std::endl;
   }
   else if(lg == "PYTHON") {
      TString classname = filename;
      TString pyfile = filename;
      pyfile += ".py";
      std::ofstream pythonfile(pyfile);
      pythonfile << "from math import exp" << std::endl << std::endl;
      pythonfile << "from math import tanh" << std::endl << std::endl;
      pythonfile << "class " << classname << ":" << std::endl;
      pythonfile << "\tdef value(self,index";
      for (i = 0; i < fFirstLayer.GetEntriesFast(); i++) {
         pythonfile << ",in" << i;
      }
      pythonfile << "):" << std::endl;
      for (i = 0; i < fFirstLayer.GetEntriesFast(); i++)
         pythonfile << "\t\tself.input" << i << " = (in" << i << " - "
             << ((TNeuron *) fFirstLayer[i])->GetNormalisation()[1] << ")/"
             << ((TNeuron *) fFirstLayer[i])->GetNormalisation()[0] << std::endl;
      TNeuron *neuron;
      TObjArrayIter *it = (TObjArrayIter *) fLastLayer.MakeIterator();
      Int_t idx = 0;
      while ((neuron = (TNeuron *) it->Next()))
         pythonfile << "\t\tif index==" << idx++
                    << ": return self.neuron" << neuron << "();" << std::endl;
      pythonfile << "\t\treturn 0." << std::endl;
      delete it;
      it = (TObjArrayIter *) fNetwork.MakeIterator();
      idx = 0;
      while ((neuron = (TNeuron *) it->Next())) {
         pythonfile << "\tdef neuron" << neuron << "(self):" << std::endl;
         if (!neuron->GetPre(0))
            pythonfile << "\t\treturn self.input" << idx++ << std::endl;
         else {
            pythonfile << "\t\tinput = " << neuron->GetWeight() << std::endl;
            TSynapse *syn;
            Int_t n = 0;
            while ((syn = neuron->GetPre(n++)))
               pythonfile << "\t\tinput = input + self.synapse"
                          << syn << "()" << std::endl;
            switch(neuron->GetType()) {
            case (TNeuron::kSigmoid):
               {
                  pythonfile << "\t\tif input<-709. : return " << neuron->GetNormalisation()[1] << std::endl;
                  pythonfile << "\t\treturn ((1/(1+exp(-input)))*";
                  break;
               }
            case (TNeuron::kLinear):
               {
                  pythonfile << "\t\treturn (input*";
                  break;
               }
            case (TNeuron::kTanh):
               {
                  pythonfile << "\t\treturn (tanh(input)*";
                  break;
               }
            case (TNeuron::kGauss):
               {
                  pythonfile << "\t\treturn (exp(-input*input)*";
                  break;
               }
            case (TNeuron::kSoftmax):
               {
                  pythonfile << "\t\treturn (exp(input) / (";
                  Int_t nn = 0;
                  TNeuron* side = neuron->GetInLayer(nn++);
                  pythonfile << "exp(self.neuron" << side << "())";
                  while ((side = neuron->GetInLayer(nn++)))
                     pythonfile << " + exp(self.neuron" << side << "())";
                  pythonfile << ") * ";
                  break;
               }
            default:
               {
                  pythonfile << "\t\treturn 0.";
               }
            }
            pythonfile << neuron->GetNormalisation()[0] << ")+" ;
            pythonfile << neuron->GetNormalisation()[1] << std::endl;
         }
      }
      delete it;
      TSynapse *synapse = 0;
      it = (TObjArrayIter *) fSynapses.MakeIterator();
      while ((synapse = (TSynapse *) it->Next())) {
         pythonfile << "\tdef synapse" << synapse << "(self):" << std::endl;
         pythonfile << "\t\treturn (self.neuron" << synapse->GetPre()
                    << "()*" << synapse->GetWeight() << ")" << std::endl;
      }
      delete it;
      pythonfile.close();
      std::cout << pyfile << " created." << std::endl;
   }
}
////////////////////////////////////////////////////////////////////////////////
/// Shuffle the Int_t index[n] in input.
/// Input:
///  - index: the array to shuffle
///  - n: the size of the array

void TMultiLayerPerceptron::Shuffle(Int_t * index, Int_t n) const
{
   TTimeStamp ts;
   TRandom3 rnd(ts.GetSec());
   Int_t j, k;
   Int_t a = n - 1;
   for (Int_t i = 0; i < n; i++) {
      j = (Int_t) (rnd.Rndm() * a);
      k = index[j];
      index[j] = index[i];
      index[i] = k;
   }
}
////////////////////////////////////////////////////////////////////////////////
/// One step for the stochastic method:
/// events are presented in a random order, and the corrections are applied
/// after each event.

void TMultiLayerPerceptron::MLP_Stochastic(Double_t * buffer)
{
   Int_t nEvents = fTraining->GetN();
   Int_t *index = new Int_t[nEvents];
   Int_t i,j,nentries;
   for (i = 0; i < nEvents; i++)
      index[i] = i;
   fEta *= fEtaDecay;
   Shuffle(index, nEvents);
   TNeuron *neuron;
   TSynapse *synapse;
   for (i = 0; i < nEvents; i++) {
      GetEntry(fTraining->GetEntry(index[i]));
      // First compute the input / output
      Int_t cnt = 0;
      nentries = fFirstLayer.GetEntriesFast();
      for (j=0;j<nentries;j++) {
         neuron = (TNeuron *) fFirstLayer.UncheckedAt(j);
         neuron->GetValue();
      }
      // Then propagate back and adjust the weights
      nentries = fNetwork.GetEntriesFast();
      for (j=0;j<nentries;j++) {
         neuron = (TNeuron *) fNetwork.UncheckedAt(j);
         buffer[cnt] = (-fEta) * (neuron->GetDeDw() + fDelta)
                       + fEpsilon * buffer[cnt];
         neuron->SetWeight(neuron->GetWeight() + buffer[cnt++]);
      }
      nentries = fSynapses.GetEntriesFast();
      for (j=0;j<nentries;j++) {
         synapse = (TSynapse *) fSynapses.UncheckedAt(j);
         buffer[cnt] = (-fEta) * (synapse->GetDeDw() + fDelta)
                       + fEpsilon * buffer[cnt];
         synapse->SetWeight(synapse->GetWeight() + buffer[cnt++]);
      }
   }
   delete[] index;
}
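////////////////////////////////////////////////////////////////////////////////
/// The update applied above to every weight w is the usual stochastic rule
/// with momentum and a flat-spot elimination term,
///   Delta w(t) = -fEta * (dE/dw + fDelta) + fEpsilon * Delta w(t-1),
/// with fEta additionally damped by fEtaDecay once per call.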
////////////////////////////////////////////////////////////////////////////////
/// One step for the batch (stochastic) method.
/// DEDw should have been computed before (by a call to ComputeDEDw).

void TMultiLayerPerceptron::MLP_Batch(Double_t * buffer)
{
   fEta *= fEtaDecay;
   Int_t cnt = 0;
   TObjArrayIter *it = (TObjArrayIter *) fNetwork.MakeIterator();
   TNeuron *neuron = 0;
   // Step for all neurons
   while ((neuron = (TNeuron *) it->Next())) {
      buffer[cnt] = (-fEta) * (neuron->GetDEDw() + fDelta)
                    + fEpsilon * buffer[cnt];
      neuron->SetWeight(neuron->GetWeight() + buffer[cnt++]);
   }
   delete it;
   it = (TObjArrayIter *) fSynapses.MakeIterator();
   TSynapse *synapse = 0;
   // Step for all synapses
   while ((synapse = (TSynapse *) it->Next())) {
      buffer[cnt] = (-fEta) * (synapse->GetDEDw() + fDelta)
                    + fEpsilon * buffer[cnt];
      synapse->SetWeight(synapse->GetWeight() + buffer[cnt++]);
   }
   delete it;
}
////////////////////////////////////////////////////////////////////////////////
/// Sets the weights to a point along a line:
/// weights are set to [origin + (dist * dir)].

void TMultiLayerPerceptron::MLP_Line(Double_t * origin, Double_t * dir, Double_t dist)
{
   Int_t idx = 0;
   TNeuron *neuron = 0;
   TSynapse *synapse = 0;
   TObjArrayIter *it = (TObjArrayIter *) fNetwork.MakeIterator();
   while ((neuron = (TNeuron *) it->Next())) {
      neuron->SetWeight(origin[idx] + (dir[idx] * dist));
      idx++;
   }
   delete it;
   it = (TObjArrayIter *) fSynapses.MakeIterator();
   while ((synapse = (TSynapse *) it->Next())) {
      synapse->SetWeight(origin[idx] + (dir[idx] * dist));
      idx++;
   }
   delete it;
}
////////////////////////////////////////////////////////////////////////////////
/// Sets the search direction to steepest descent.

void TMultiLayerPerceptron::SteepestDir(Double_t * dir)
{
   Int_t idx = 0;
   TNeuron *neuron = 0;
   TSynapse *synapse = 0;
   TObjArrayIter *it = (TObjArrayIter *) fNetwork.MakeIterator();
   while ((neuron = (TNeuron *) it->Next()))
      dir[idx++] = -neuron->GetDEDw();
   delete it;
   it = (TObjArrayIter *) fSynapses.MakeIterator();
   while ((synapse = (TSynapse *) it->Next()))
      dir[idx++] = -synapse->GetDEDw();
   delete it;
}
////////////////////////////////////////////////////////////////////////////////
/// Search along the line defined by direction.
/// buffer is not used but is updated with the new dw
/// so that it can be used by a later stochastic step.
/// It returns true if the line search fails.

bool TMultiLayerPerceptron::LineSearch(Double_t * direction, Double_t * buffer)
{
   Int_t idx = 0;
   Int_t j,nentries;
   TNeuron *neuron = 0;
   TSynapse *synapse = 0;
   // store weights before line search
   Double_t *origin = new Double_t[fNetwork.GetEntriesFast() +
                                   fSynapses.GetEntriesFast()];
   nentries = fNetwork.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      neuron = (TNeuron *) fNetwork.UncheckedAt(j);
      origin[idx++] = neuron->GetWeight();
   }
   nentries = fSynapses.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      synapse = (TSynapse *) fSynapses.UncheckedAt(j);
      origin[idx++] = synapse->GetWeight();
   }
   // try to find a triplet (alpha1, alpha2, alpha3) such that
   // Error(alpha1)>Error(alpha2)<Error(alpha3)
   Double_t err1 = GetError(kTraining);
   Double_t alpha1 = 0.;
   Double_t alpha2 = fLastAlpha;
   if (alpha2 < 0.01) alpha2 = 0.01;
   if (alpha2 > 2.0)  alpha2 = 2.0;
   Double_t alpha3 = alpha2;
   MLP_Line(origin, direction, alpha2);
   Double_t err2 = GetError(kTraining);
   Double_t err3 = err2;
   Bool_t bingo = false;
   Int_t icount;
   if (err1 > err2) {
      for (icount = 0; icount < 100; icount++) {
         alpha3 *= fTau;
         MLP_Line(origin, direction, alpha3);
         err3 = GetError(kTraining);
         if (err3 > err2) {
            bingo = true;
            break;
         }
         alpha1 = alpha2;
         err1 = err2;
         alpha2 = alpha3;
         err2 = err3;
      }
      if (!bingo) {
         MLP_Line(origin, direction, 0.);
         delete [] origin;
         return true;
      }
   } else {
      for (icount = 0; icount < 100; icount++) {
         alpha2 /= fTau;
         MLP_Line(origin, direction, alpha2);
         err2 = GetError(kTraining);
         if (err1 > err2) {
            bingo = true;
            break;
         }
         alpha3 = alpha2;
         err3 = err2;
      }
      if (!bingo) {
         MLP_Line(origin, direction, 0.);
         delete [] origin;
         fLastAlpha = 0.05;
         return true;
      }
   }
   // Sets the weights to the bottom of the fitted parabola
   fLastAlpha = 0.5 * (alpha1 + alpha3 -
                (err3 - err1) / ((err3 - err2) / (alpha3 - alpha2)
                - (err2 - err1) / (alpha2 - alpha1)));
   fLastAlpha = fLastAlpha < 10000 ? fLastAlpha : 10000;
   MLP_Line(origin, direction, fLastAlpha);
   GetError(kTraining);
   // Stores weight changes (can be used by a later stochastic step)
   idx = 0;
   nentries = fNetwork.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      neuron = (TNeuron *) fNetwork.UncheckedAt(j);
      buffer[idx] = neuron->GetWeight() - origin[idx];
      idx++;
   }
   nentries = fSynapses.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      synapse = (TSynapse *) fSynapses.UncheckedAt(j);
      buffer[idx] = synapse->GetWeight() - origin[idx];
      idx++;
   }
   delete [] origin;
   return false;
}
////////////////////////////////////////////////////////////////////////////////
/// Sets the search direction to conjugate gradient direction
/// beta should be:
///  - ||g_{(t+1)}||^2 / ||g_{(t)}||^2                   (Fletcher-Reeves)
///  - g_{(t+1)} (g_{(t+1)} - g_{(t)}) / ||g_{(t)}||^2   (Ribiere-Polak)

void TMultiLayerPerceptron::ConjugateGradientsDir(Double_t * dir, Double_t beta)
{
   Int_t idx = 0;
   Int_t j,nentries;
   TNeuron *neuron = 0;
   TSynapse *synapse = 0;
   nentries = fNetwork.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      neuron = (TNeuron *) fNetwork.UncheckedAt(j);
      dir[idx] = -neuron->GetDEDw() + beta * dir[idx];
      idx++;
   }
   nentries = fSynapses.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      synapse = (TSynapse *) fSynapses.UncheckedAt(j);
      dir[idx] = -synapse->GetDEDw() + beta * dir[idx];
      idx++;
   }
}
////////////////////////////////////////////////////////////////////////////////
/// Computes the hessian matrix using the BFGS update algorithm.
/// from gamma (g_{(t+1)}-g_{(t)}) and delta (w_{(t+1)}-w_{(t)}).
/// It returns true if such a direction could not be found
/// (if gamma and delta are orthogonal).

bool TMultiLayerPerceptron::GetBFGSH(TMatrixD & bfgsh, TMatrixD & gamma, TMatrixD & delta)
{
   TMatrixD gd(gamma, TMatrixD::kTransposeMult, delta);
   if ((Double_t) gd[0][0] == 0.)
      return true;
   TMatrixD aHg(bfgsh, TMatrixD::kMult, gamma);
   TMatrixD tmp(gamma, TMatrixD::kTransposeMult, bfgsh);
   TMatrixD gHg(gamma, TMatrixD::kTransposeMult, aHg);
   Double_t a = 1 / (Double_t) gd[0][0];
   Double_t f = 1 + ((Double_t) gHg[0][0] * a);
   TMatrixD res( TMatrixD(delta, TMatrixD::kMult,
                TMatrixD(TMatrixD::kTransposed, delta)));
   res *= f;
   res -= (TMatrixD(delta, TMatrixD::kMult, tmp) +
           TMatrixD(aHg, TMatrixD::kMult,
                   TMatrixD(TMatrixD::kTransposed, delta)));
   res *= a;
   bfgsh += res;
   return false;
}
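////////////////////////////////////////////////////////////////////////////////
/// With gamma = g_{(t+1)}-g_{(t)} and delta = w_{(t+1)}-w_{(t)} as filled by
/// SetGammaDelta() below, the update computed above is the standard BFGS
/// approximation of the inverse Hessian,
///   H' = H + (1 + gamma^T H gamma / gamma^T delta)
///            * delta delta^T / (gamma^T delta)
///          - (delta gamma^T H + H gamma delta^T) / (gamma^T delta).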
////////////////////////////////////////////////////////////////////////////////
/// Computes the direction for the BFGS algorithm as the product
/// between the Hessian estimate (bfgsh) and the dir.

void TMultiLayerPerceptron::SetGammaDelta(TMatrixD & gamma, TMatrixD & delta,
                                          Double_t * buffer)
{
   Int_t els = fNetwork.GetEntriesFast() + fSynapses.GetEntriesFast();
   Int_t idx = 0;
   Int_t j,nentries;
   TNeuron *neuron = 0;
   TSynapse *synapse = 0;
   nentries = fNetwork.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      neuron = (TNeuron *) fNetwork.UncheckedAt(j);
      gamma[idx++][0] = -neuron->GetDEDw();
   }
   nentries = fSynapses.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      synapse = (TSynapse *) fSynapses.UncheckedAt(j);
      gamma[idx++][0] = -synapse->GetDEDw();
   }
   for (Int_t i = 0; i < els; i++)
      delta[i].Assign(buffer[i]);

   ComputeDEDw();
   idx = 0;
   nentries = fNetwork.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      neuron = (TNeuron *) fNetwork.UncheckedAt(j);
      gamma[idx++][0] += neuron->GetDEDw();
   }
   nentries = fSynapses.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      synapse = (TSynapse *) fSynapses.UncheckedAt(j);
      gamma[idx++][0] += synapse->GetDEDw();
   }
}
////////////////////////////////////////////////////////////////////////////////
/// Scalar product between gradient and direction
/// = derivative along direction.

Double_t TMultiLayerPerceptron::DerivDir(Double_t * dir)
{
   Int_t idx = 0;
   Int_t j,nentries;
   Double_t output = 0;
   TNeuron *neuron = 0;
   TSynapse *synapse = 0;
   nentries = fNetwork.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      neuron = (TNeuron *) fNetwork.UncheckedAt(j);
      output += neuron->GetDEDw() * dir[idx++];
   }
   nentries = fSynapses.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      synapse = (TSynapse *) fSynapses.UncheckedAt(j);
      output += synapse->GetDEDw() * dir[idx++];
   }
   return output;
}
////////////////////////////////////////////////////////////////////////////////
/// Computes the direction for the BFGS algorithm as the product
/// between the Hessian estimate (bfgsh) and the gradient.

void TMultiLayerPerceptron::BFGSDir(TMatrixD & bfgsh, Double_t * dir)
{
   Int_t els = fNetwork.GetEntriesFast() + fSynapses.GetEntriesFast();
   TMatrixD dedw(els, 1);
   Int_t idx = 0;
   Int_t j,nentries;
   TNeuron *neuron = 0;
   TSynapse *synapse = 0;
   nentries = fNetwork.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      neuron = (TNeuron *) fNetwork.UncheckedAt(j);
      dedw[idx++][0] = neuron->GetDEDw();
   }
   nentries = fSynapses.GetEntriesFast();
   for (j=0;j<nentries;j++) {
      synapse = (TSynapse *) fSynapses.UncheckedAt(j);
      dedw[idx++][0] = synapse->GetDEDw();
   }
   TMatrixD direction(bfgsh, TMatrixD::kMult, dedw);
   for (Int_t i = 0; i < els; i++)
      dir[i] = -direction[i][0];
}
////////////////////////////////////////////////////////////////////////////////
/// Draws the network structure.
/// Neurons are depicted by a blue disk, and synapses by
/// lines connecting neurons.
/// The line width is proportional to the weight.

void TMultiLayerPerceptron::Draw(Option_t * )
{
#define NeuronSize 2.5

   Int_t nLayers = fStructure.CountChar(':')+1;
   Float_t xStep = 1./(nLayers+1.);
   Int_t layer;
   for(layer=0; layer< nLayers-1; layer++) {
      Float_t nNeurons_this = 0;
      if(layer==0) {
         TString input = TString(fStructure(0, fStructure.First(':')));
         nNeurons_this = input.CountChar(',')+1;
      }
      else {
         Int_t cnt=0;
         TString hidden = TString(fStructure(fStructure.First(':') + 1,fStructure.Last(':') - fStructure.First(':') - 1));
         Int_t beg = 0;
         Int_t end = hidden.Index(":", beg + 1);
         while (end != -1) {
            Int_t num = atoi(TString(hidden(beg, end - beg)).Data());
            cnt++;
            beg = end + 1;
            end = hidden.Index(":", beg + 1);
            if(layer==cnt) nNeurons_this = num;
         }
         Int_t num = atoi(TString(hidden(beg, hidden.Length() - beg)).Data());
         cnt++;
         if(layer==cnt) nNeurons_this = num;
      }
      Float_t nNeurons_next = 0;
      if(layer==nLayers-2) {
         TString output = TString(fStructure(fStructure.Last(':') + 1,fStructure.Length() - fStructure.Last(':')));
         nNeurons_next = output.CountChar(',')+1;
      }
      else {
         Int_t cnt=0;
         TString hidden = TString(fStructure(fStructure.First(':') + 1,fStructure.Last(':') - fStructure.First(':') - 1));
         Int_t beg = 0;
         Int_t end = hidden.Index(":", beg + 1);
         while (end != -1) {
            Int_t num = atoi(TString(hidden(beg, end - beg)).Data());
            cnt++;
            beg = end + 1;
            end = hidden.Index(":", beg + 1);
            if(layer+1==cnt) nNeurons_next = num;
         }
         Int_t num = atoi(TString(hidden(beg, hidden.Length() - beg)).Data());
         cnt++;
         if(layer+1==cnt) nNeurons_next = num;
      }
      Float_t yStep_this = 1./(nNeurons_this+1.);
      Float_t yStep_next = 1./(nNeurons_next+1.);
      TObjArrayIter* it = (TObjArrayIter *) fSynapses.MakeIterator();
      TSynapse *theSynapse = 0;
      Float_t maxWeight = 0;
      while ((theSynapse = (TSynapse *) it->Next()))
         maxWeight = maxWeight < theSynapse->GetWeight() ? theSynapse->GetWeight() : maxWeight;
      delete it;
      it = (TObjArrayIter *) fSynapses.MakeIterator();
      for(Int_t neuron1=0; neuron1<nNeurons_this; neuron1++) {
         for(Int_t neuron2=0; neuron2<nNeurons_next; neuron2++) {
            TLine* synapse =
               new TLine(xStep*(layer+1),yStep_this*(neuron1+1),xStep*(layer+2),yStep_next*(neuron2+1));
            synapse->Draw();
            theSynapse = (TSynapse *) it->Next();
            if (!theSynapse) continue;
            synapse->SetLineWidth(Int_t((theSynapse->GetWeight()/maxWeight)*10.));
            synapse->SetLineStyle(1);
            if(((TMath::Abs(theSynapse->GetWeight())/maxWeight)*10.)<0.5)  synapse->SetLineStyle(2);
            if(((TMath::Abs(theSynapse->GetWeight())/maxWeight)*10.)<0.25) synapse->SetLineStyle(3);
         }
      }
      delete it;
   }
   for(layer=0; layer< nLayers; layer++) {
      Float_t nNeurons = 0;
      if(layer==0) {
         TString input = TString(fStructure(0, fStructure.First(':')));
         nNeurons = input.CountChar(',')+1;
      }
      else if(layer==nLayers-1) {
         TString output = TString(fStructure(fStructure.Last(':') + 1,fStructure.Length() - fStructure.Last(':')));
         nNeurons = output.CountChar(',')+1;
      }
      else {
         Int_t cnt=0;
         TString hidden = TString(fStructure(fStructure.First(':') + 1,fStructure.Last(':') - fStructure.First(':') - 1));
         Int_t beg = 0;
         Int_t end = hidden.Index(":", beg + 1);
         while (end != -1) {
            Int_t num = atoi(TString(hidden(beg, end - beg)).Data());
            cnt++;
            beg = end + 1;
            end = hidden.Index(":", beg + 1);
            if(layer==cnt) nNeurons = num;
         }
         Int_t num = atoi(TString(hidden(beg, hidden.Length() - beg)).Data());
         cnt++;
         if(layer==cnt) nNeurons = num;
      }
      Float_t yStep = 1./(nNeurons+1.);
      for(Int_t neuron=0; neuron<nNeurons; neuron++) {
         TMarker* m = new TMarker(xStep*(layer+1),yStep*(neuron+1),20);
         m->SetMarkerColor(4);
         m->SetMarkerSize(NeuronSize);
         m->Draw();
      }
   }
   const TString input = TString(fStructure(0, fStructure.First(':')));
   const TObjArray *inpL = input.Tokenize(" ,");
   const Int_t nrItems = inpL->GetLast()+1;
   Float_t yStep = 1./(nrItems+1);
   for (Int_t item = 0; item < nrItems; item++) {
      const TString brName = ((TObjString *)inpL->At(item))->GetString();
      TText* label = new TText(0.5*xStep, yStep*(item+1), brName.Data());
      label->Draw();
   }
   delete inpL;

   Int_t numOutNodes=fLastLayer.GetEntriesFast();
   yStep=1./(numOutNodes+1);
   for (Int_t outnode=0; outnode<numOutNodes; outnode++) {
      TNeuron* neuron=(TNeuron*)fLastLayer[outnode];
      if (neuron && neuron->GetName()) {
         TText* label = new TText(xStep*nLayers,
                                  yStep*(outnode+1),
                                  neuron->GetName());
         label->Draw();
      }
   }
}