#include <cstdlib>
#include <iostream>
#include <map>
#include <string>
#include <vector>

#include "TFile.h"
#include "TTree.h"
#include "TString.h"
#include "TSystem.h"
#include "TROOT.h"

#include "TMVA/Factory.h"
#include "TMVA/DataLoader.h"
#include "TMVA/Tools.h"
#include "TMVA/TMVAMultiClassGui.h"

using namespace TMVA;

void TMVAMulticlass( TString myMethodList = "" )
{
   // Load the TMVA library
   TMVA::Tools::Instance();
   // Default MVA methods to be trained and tested; the selection can be
   // overridden via the macro argument (comma-separated list of method names)
   std::map<std::string,int> Use;
   Use["BDTG"]    = 1;
   Use["MLP"]     = 1;
   Use["FDA_GA"]  = 0;
   Use["PDEFoam"] = 1;
   Use["DNN_CPU"] = 0;   // enable only with a multi-threaded (CPU) build of TMVA

   std::cout << std::endl;
   std::cout << "==> Start TMVAMulticlass" << std::endl;

   // If a method list was given, enable only the requested methods
   if (myMethodList != "") {
      for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;

      std::vector<TString> mlist = TMVA::gTools().SplitString( myMethodList, ',' );
      for (UInt_t i=0; i<mlist.size(); i++) {
         std::string regMethod(mlist[i]);

         if (Use.find(regMethod) == Use.end()) {
            std::cout << "Method \"" << regMethod << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
            for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) std::cout << it->first << " ";
            std::cout << std::endl;
            return;
         }
         Use[regMethod] = 1;
      }
   }
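
   // -------------------------------------------------------------------------
   // Create the output file, the Factory (configured for a multiclass
   // analysis via AnalysisType=multiclass), and the DataLoader that holds
   // the input variables and trees.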
   TString outfileName = "TMVAMulticlass.root";
   TFile* outputFile = TFile::Open( outfileName, "RECREATE" );

   TMVA::Factory *factory = new TMVA::Factory( "TMVAMulticlass", outputFile,
      "!V:!Silent:Color:DrawProgressBar:Transformations=I;D;P;G,D:AnalysisType=multiclass" );

   TMVA::DataLoader *dataloader = new TMVA::DataLoader("dataset");

   dataloader->AddVariable( "var1", 'F' );
   dataloader->AddVariable( "var2", "Variable 2", "", 'F' );
   dataloader->AddVariable( "var3", "Variable 3", "units", 'F' );
   dataloader->AddVariable( "var4", "Variable 4", "units", 'F' );
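
   // -------------------------------------------------------------------------
   // Open the input file with one signal tree and three background trees.
   // If the file is not found locally, it is generated on the fly with the
   // createData.C tutorial macro.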
   TFile *input(0);
   TString fname = "./tmva_example_multiple_background.root";
   if (!gSystem->AccessPathName( fname )) {
      // The file exists in the local directory
      std::cout << "--- TMVAMulticlass : Accessing " << fname << std::endl;
      input = TFile::Open( fname );
   }
   else {
      std::cout << "Creating testdata...." << std::endl;
      TString createDataMacro = gROOT->GetTutorialDir() + "/tmva/createData.C";
      gROOT->ProcessLine(TString::Format(".L %s", createDataMacro.Data()));
      gROOT->ProcessLine("create_MultipleBackground(2000)");
      std::cout << " created tmva_example_multiple_background.root for tests of the multiclass features" << std::endl;
      input = TFile::Open( fname );
   }
   if (!input) {
      std::cout << "ERROR: could not open data file" << std::endl;
      exit(1);
   }
   TTree *signalTree  = (TTree*)input->Get("TreeS");
   TTree *background0 = (TTree*)input->Get("TreeB0");
   TTree *background1 = (TTree*)input->Get("TreeB1");
   TTree *background2 = (TTree*)input->Get("TreeB2");

   gROOT->cd( outfileName+TString(":/") );
   dataloader->AddTree( signalTree,  "Signal" );
   dataloader->AddTree( background0, "bg0" );
   dataloader->AddTree( background1, "bg1" );
   dataloader->AddTree( background2, "bg2" );

   dataloader->PrepareTrainingAndTestTree( "", "SplitMode=Random:NormMode=NumEvents:!V" );
   // Gradient-boosted decision trees
   if (Use["BDTG"])
      factory->BookMethod( dataloader, TMVA::Types::kBDT, "BDTG",
         "!H:!V:NTrees=1000:BoostType=Grad:Shrinkage=0.10:UseBaggedBoost:BaggedSampleFraction=0.50:nCuts=20:MaxDepth=2" );

   // Multi-layer perceptron (neural network)
   if (Use["MLP"])
      factory->BookMethod( dataloader, TMVA::Types::kMLP, "MLP",
         "!H:!V:NeuronType=tanh:NCycles=1000:HiddenLayers=N+5,5:TestRate=5:EstimatorType=MSE" );

   // Functional discriminant analysis, fitted with a genetic algorithm
   if (Use["FDA_GA"])
      factory->BookMethod( dataloader, TMVA::Types::kFDA, "FDA_GA",
         "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=GA:PopSize=300:Cycles=3:Steps=20:Trim=True:SaveBestGen=1" );

   // PDE-Foam
   if (Use["PDEFoam"])
      factory->BookMethod( dataloader, TMVA::Types::kPDEFoam, "PDEFoam",
         "!H:!V:TailCut=0.001:VolFrac=0.0666:nActiveCells=500:nSampl=2000:nBin=5:Nmin=100:Kernel=None:Compress=T" );
   if (Use["DNN_CPU"]) {
      TString layoutString("Layout=TANH|100,TANH|50,TANH|10,LINEAR");
      TString training0("LearningRate=1e-1, Momentum=0.5, Repetitions=1, ConvergenceSteps=10,"
                        " BatchSize=256, TestRepetitions=10, Multithreading=True");
      TString training1("LearningRate=1e-2, Momentum=0.0, Repetitions=1, ConvergenceSteps=10,"
                        " BatchSize=256, TestRepetitions=7, Multithreading=True");
      TString trainingStrategyString("TrainingStrategy=");
      trainingStrategyString += training0 + "|" + training1;

      TString nnOptions("!H:V:ErrorStrategy=CROSSENTROPY:VarTransform=N:"
                        "WeightInitialization=XAVIERUNIFORM:Architecture=CPU");
      nnOptions.Append(":");
      nnOptions.Append(layoutString);
      nnOptions.Append(":");
      nnOptions.Append(trainingStrategyString);

      factory->BookMethod(dataloader, TMVA::Types::kDNN, "DNN_CPU", nnOptions);
   }
   // Train, test, and evaluate all booked methods
   factory->TrainAllMethods();
   factory->TestAllMethods();
   factory->EvaluateAllMethods();

   // Save the output
   outputFile->Close();

   std::cout << "==> Wrote root file: " << outputFile->GetName() << std::endl;
   std::cout << "==> TMVAMulticlass is done!" << std::endl;

   delete factory;
   delete dataloader;

   // Launch the GUI for the result macros
   if (!gROOT->IsBatch()) TMVAMultiClassGui( outfileName );
}
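
// Standalone entry point: forward the command-line arguments as the method
// list, skipping ROOT's batch-mode flags.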
int main( int argc, char** argv )
{
   // Collect the requested methods into a comma-separated list
   TString methodList;
   for (int i = 1; i < argc; i++) {
      TString regMethod(argv[i]);
      if (regMethod == "-b" || regMethod == "--batch") continue;
      if (!methodList.IsNull()) methodList += TString(",");
      methodList += regMethod;
   }
   TMVAMulticlass(methodList);
   return 0;
}
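
// Typical invocations from the ROOT prompt or the shell (adjust the method
// list as needed):
//    root -l TMVAMulticlass.C                  // run the default methods
//    root -l 'TMVAMulticlass.C("BDTG,MLP")'    // run only the listed methods
// The results written to TMVAMulticlass.root can then be inspected with the
// TMVAMultiClassGui macro.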