TMVA::DataSet::DataSet(const DataSetInfo& dsi)
   : TNamed(dsi.GetName(), "DataSet"),
     fHasNegativeEventWeights(kFALSE),
     fLogger( new MsgLogger(TString(TString("Dataset:")+dsi.GetName()).Data()) ),
{
   fClassEvents.resize(4);
   fBlockBelongToTraining.reserve(10);
   fBlockBelongToTraining.push_back(kTRUE);

   fSampling.resize( treeNum );
   fSamplingNEvents.resize( treeNum );
   fSamplingWeight.resize( treeNum );

   for (Int_t treeIdx = 0; treeIdx < treeNum; treeIdx++) {
      fSampling.at(treeIdx)        = kFALSE;
      fSamplingNEvents.at(treeIdx) = 0;
      fSamplingWeight.at(treeIdx)  = 1.0;
   }
}
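
////////////////////////////////////////////////////////////////////////////////
/// default constructor: builds its own DataSetInfo and otherwise performs the
/// same initialisation of the class-event counters, training blocks and
/// sampling bookkeeping as the constructor above.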
TMVA::DataSet::DataSet()
   : fdsi(new DataSetInfo(GetName())),
     fHasNegativeEventWeights(kFALSE),
     fLogger( new MsgLogger(TString(TString("Dataset:")+GetName()).Data()) ),
{
   fClassEvents.resize(4);
   fBlockBelongToTraining.reserve(10);
   fBlockBelongToTraining.push_back(kTRUE);

   fSampling.resize( treeNum );
   fSamplingNEvents.resize( treeNum );
   fSamplingWeight.resize( treeNum );

   for (Int_t treeIdx = 0; treeIdx < treeNum; treeIdx++) {
      fSampling.at(treeIdx)        = kFALSE;
      fSamplingNEvents.at(treeIdx) = 0;
      fSamplingWeight.at(treeIdx)  = 1.0;
   }
}
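
////////////////////////////////////////////////////////////////////////////////
/// destructor: deletes the stored events of all tree types and the Results
/// objects registered for them, plus the sampling random generator if one was
/// created.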
TMVA::DataSet::~DataSet()
{
   Bool_t deleteEvents = true;
   DestroyCollection( Types::kTraining, deleteEvents );
   DestroyCollection( Types::kTesting, deleteEvents );

   fBlockBelongToTraining.clear();

   for (std::vector< std::map< TString, Results* > >::iterator it = fResults.begin(); it != fResults.end(); ++it) {
      for (std::map< TString, Results* >::iterator itMap = (*it).begin(); itMap != (*it).end(); ++itMap) {
         delete itMap->second;
      }
   }

   if (fSamplingRandom != 0) delete fSamplingRandom;

   DestroyCollection( Types::kValidation, deleteEvents );
   DestroyCollection( Types::kTrainingOriginal, deleteEvents );
}
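
////////////////////////////////////////////////////////////////////////////////
/// increment the per-class event counter for the given tree type, growing the
/// counter vectors on demand.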
void TMVA::DataSet::IncrementNClassEvents( Int_t type, UInt_t classNumber )
{
   if (fClassEvents.size() < (UInt_t)(type+1)) fClassEvents.resize( type+1 );
   if (fClassEvents.at( type ).size() < classNumber+1) fClassEvents.at( type ).resize( classNumber+1 );
   fClassEvents.at( type ).at( classNumber ) += 1;
}
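
////////////////////////////////////////////////////////////////////////////////
/// reset the per-class event counters for the given tree type.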
void TMVA::DataSet::ClearNClassEvents( Int_t type )
{
   if (fClassEvents.size() < (UInt_t)(type+1)) fClassEvents.resize( type+1 );
   fClassEvents.at( type ).clear();
}
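
////////////////////////////////////////////////////////////////////////////////
/// return the number of events of the given class in the given tree type;
/// aborts with kFATAL if the class or tree type is unknown.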
Long64_t TMVA::DataSet::GetNClassEvents( Int_t type, UInt_t classNumber )
{
   try {
      return fClassEvents.at(type).at(classNumber);
   }
   catch (std::out_of_range &) {
      ClassInfo* ci = fdsi->GetClassInfo( classNumber );
      Log() << kFATAL << Form("Dataset[%s] : ",fdsi->GetName())
            << "No " << (type==0 ? "training" : (type==1 ? "testing" : "_unknown_type_"))
            << " events for class " << (ci==NULL ? "_no_name_known_" : ci->GetName())
            << " (index # " << classNumber << ")"
            << " available. Check if all class names are spelled correctly and if events are"
            << " passing the selection cuts." << Endl;
   }
   catch (...) {
      Log() << kFATAL << Form("Dataset[%s] : ",fdsi->GetName())
            << "ERROR/CAUGHT : DataSet/GetNClassEvents, .. unknown error" << Endl;
   }
   return 0;
}
void TMVA::DataSet::DestroyCollection( Types::ETreeType type, Bool_t deleteEvents )
{
   UInt_t i = TreeIndex(type);
   if (i >= fEventCollection.size() || fEventCollection[i].size() == 0) return;

   if (deleteEvents)
      for (UInt_t j=0; j<fEventCollection[i].size(); j++) delete fEventCollection[i][j];

   fEventCollection[i].clear();
}
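
////////////////////////////////////////////////////////////////////////////////
/// return the current event; if sampling is active for the current tree, the
/// event index is translated through the list of sampled (selected) events.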
const TMVA::Event* TMVA::DataSet::GetEvent() const
{
   if (fSampling.size() > UInt_t(fCurrentTreeIdx) && fSampling.at(fCurrentTreeIdx)) {
      Long64_t iEvt = fSamplingSelected.at(fCurrentTreeIdx).at( fCurrentEventIdx ).second;
      return fEventCollection.at(fCurrentTreeIdx).at(iEvt);
   }
   return fEventCollection.at(fCurrentTreeIdx).at(fCurrentEventIdx);
}
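
////////////////////////////////////////////////////////////////////////////////
/// accessors forwarding to the DataSetInfo: number of variables, targets and
/// spectators.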
UInt_t TMVA::DataSet::GetNVariables() const
{
   return fdsi->GetNVariables();
}

UInt_t TMVA::DataSet::GetNTargets() const
{
   return fdsi->GetNTargets();
}

UInt_t TMVA::DataSet::GetNSpectators() const
{
   return fdsi->GetNSpectators();
}
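
////////////////////////////////////////////////////////////////////////////////
/// add an event to the collection of the given tree type and remember whether
/// negative event weights occur.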
void TMVA::DataSet::AddEvent( Event * ev, Types::ETreeType type )
{
   fEventCollection.at(Int_t(type)).push_back(ev);
   if (ev->GetWeight() < 0) fHasNegativeEventWeights = kTRUE;
}
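
////////////////////////////////////////////////////////////////////////////////
/// set (replace) the event collection of the given tree type and rebuild the
/// per-class event counters.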
void TMVA::DataSet::SetEventCollection( std::vector<TMVA::Event*>* events, Types::ETreeType type, Bool_t deleteEvents )
{
   DestroyCollection(type, deleteEvents);

   const Int_t t = TreeIndex(type);
   ClearNClassEvents( type );
   fEventCollection.at(t) = *events;
   for (std::vector<Event*>::iterator it = fEventCollection.at(t).begin(); it < fEventCollection.at(t).end(); ++it) {
      IncrementNClassEvents( t, (*it)->GetClass() );
   }
}
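
////////////////////////////////////////////////////////////////////////////////
/// return the Results object registered under resultsName for the given tree
/// type; if it does not exist yet, a new one matching the requested analysis
/// type is created and registered. A typical call (sketch; "BDT" is a
/// hypothetical result name and ds a hypothetical DataSet pointer) might be:
///    Results* res = ds->GetResults( "BDT", Types::kTesting, Types::kClassification );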
TMVA::Results* TMVA::DataSet::GetResults( const TString & resultsName,
                                          Types::ETreeType type,
                                          Types::EAnalysisType analysistype )
{
   UInt_t t = TreeIndex(type);

   if (t < fResults.size()) {
      const std::map< TString, Results* >& resultsForType = fResults[t];
      std::map< TString, Results* >::const_iterator it = resultsForType.find(resultsName);
      if (it != resultsForType.end()) {
         // an existing result of that name is returned as is
         return it->second;
      }
   }
   else {
      fResults.resize(t+1);
   }

   // no result of that name yet: create one matching the analysis type
   Results * newresults = 0;
   switch (analysistype) {
   case Types::kClassification:
      newresults = new ResultsClassification(fdsi, resultsName);
      break;
   case Types::kRegression:
      newresults = new ResultsRegression(fdsi, resultsName);
      break;
   case Types::kMulticlass:
      newresults = new ResultsMulticlass(fdsi, resultsName);
      break;
   case Types::kNoAnalysisType:
      newresults = new ResultsClassification(fdsi, resultsName);
      break;
   case Types::kMaxAnalysisType:
      return 0;
   }

   newresults->SetTreeType( type );
   fResults[t][resultsName] = newresults;
   return newresults;
}
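
////////////////////////////////////////////////////////////////////////////////
/// delete the Results object registered under resultsName for the given tree
/// type (the analysis type argument is not used).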
void TMVA::DataSet::DeleteResults( const TString & resultsName,
                                   Types::ETreeType type,
                                   Types::EAnalysisType )
{
   if (fResults.empty()) return;

   if (UInt_t(type) > fResults.size()) {
      Log() << kFATAL << Form("Dataset[%s] : ",fdsi->GetName())
            << "you asked for a tree type (training/testing/...)"
            << " whose index " << type << " does not exist " << Endl;
   }
   std::map< TString, Results* >& resultsForType = fResults[UInt_t(type)];
   std::map< TString, Results* >::iterator it = resultsForType.find(resultsName);
   if (it != resultsForType.end()) {
      Log() << kDEBUG << Form("Dataset[%s] : ",fdsi->GetName())
            << "deleting previously existing result: " << resultsName
            << " of type " << type << Endl;
      delete it->second;
      resultsForType.erase(it->first);
   }
   else {
      Log() << kINFO << Form("Dataset[%s] : ",fdsi->GetName())
            << "could not find Result class of " << resultsName
            << " of type " << type << " which I should have deleted" << Endl;
   }
}
void TMVA::DataSet::DeleteAllResults( Types::ETreeType type,
                                      Types::EAnalysisType )
{
   if (fResults.empty()) return;

   if (UInt_t(type) > fResults.size()) {
      Log() << kFATAL << Form("Dataset[%s] : ",fdsi->GetName())
            << "you asked for a tree type (training/testing/...)"
            << " whose index " << type << " does not exist " << Endl;
   }
   std::map<TString, Results *> & resultsForType = fResults[UInt_t(type)];

   for (auto && it : resultsForType) {
      auto & resultsName = it.first;
      Log() << kDEBUG << Form("Dataset[%s] : ", fdsi->GetName())
            << "DeleteAllResults: deleting previously existing result: " << resultsName
            << " of type " << type << Endl;
      delete it.second;
   }
   resultsForType.clear();
}
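
////////////////////////////////////////////////////////////////////////////////
/// divide the training set into blockNum blocks: the original training events
/// are preserved under Types::kTrainingOriginal and then redistributed between
/// training and validation by ApplyTrainingSetDivision(). Sketch of intended
/// use (hypothetical caller with a DataSet* ds):
///    ds->DivideTrainingSet( 4 );                             // 4 blocks, all marked as training
///    ds->MoveTrainingBlock( 0, Types::kValidation, kTRUE );  // move block 0 to validation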
void TMVA::DataSet::DivideTrainingSet( UInt_t blockNum )
{
   Int_t tOrg = TreeIndex(Types::kTrainingOriginal), tTrn = TreeIndex(Types::kTraining);

   if (fBlockBelongToTraining.size() == blockNum) return;

   if (fBlockBelongToTraining.size() == 1) {
      if (fEventCollection[tOrg].size() == 0)
         fEventCollection[tOrg].resize(fEventCollection[tTrn].size());
      fEventCollection[tOrg].clear();
      for (UInt_t i=0; i<fEventCollection[tTrn].size(); i++)
         fEventCollection[tOrg].push_back(fEventCollection[tTrn][i]);
      fClassEvents[tOrg] = fClassEvents[tTrn];
   }

   fBlockBelongToTraining.clear();
   for (UInt_t i=0; i < blockNum; i++) fBlockBelongToTraining.push_back(kTRUE);

   ApplyTrainingSetDivision();
}
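
////////////////////////////////////////////////////////////////////////////////
/// redistribute the original training events between the training and
/// validation collections according to the fBlockBelongToTraining flags
/// (event i belongs to block i modulo the number of blocks).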
void TMVA::DataSet::ApplyTrainingSetDivision()
{
   Int_t tOrg = TreeIndex(Types::kTrainingOriginal), tTrn = TreeIndex(Types::kTraining), tVld = TreeIndex(Types::kValidation);
   fEventCollection[tTrn].clear();
   if (fEventCollection[tVld].size() == 0)
      fEventCollection[tVld].resize(fEventCollection[tOrg].size());
   fEventCollection[tVld].clear();

   for (UInt_t i=0; i<fEventCollection[tOrg].size(); i++) {
      if (fBlockBelongToTraining[i % fBlockBelongToTraining.size()])
         fEventCollection[tTrn].push_back(fEventCollection[tOrg][i]);
      else
         fEventCollection[tVld].push_back(fEventCollection[tOrg][i]);
   }
}
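
////////////////////////////////////////////////////////////////////////////////
/// mark a training block as belonging to training or validation and, if
/// requested, immediately re-apply the division.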
void TMVA::DataSet::MoveTrainingBlock( Int_t blockInd, Types::ETreeType dest, Bool_t applyChanges )
{
   if (dest == Types::kValidation)
      fBlockBelongToTraining[blockInd] = kFALSE;
   else
      fBlockBelongToTraining[blockInd] = kTRUE;

   if (applyChanges) ApplyTrainingSetDivision();
}
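
////////////////////////////////////////////////////////////////////////////////
/// convenience accessors: number of signal/background events in the testing
/// and training trees, looked up via the "Signal" and "Background" class names.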
Long64_t TMVA::DataSet::GetNEvtSigTest()
{
   return GetNClassEvents(Types::kTesting, fdsi->GetClassInfo("Signal")->GetNumber() );
}

Long64_t TMVA::DataSet::GetNEvtBkgdTest()
{
   return GetNClassEvents(Types::kTesting, fdsi->GetClassInfo("Background")->GetNumber() );
}

Long64_t TMVA::DataSet::GetNEvtSigTrain()
{
   return GetNClassEvents(Types::kTraining, fdsi->GetClassInfo("Signal")->GetNumber() );
}

Long64_t TMVA::DataSet::GetNEvtBkgdTrain()
{
   return GetNClassEvents(Types::kTraining, fdsi->GetClassInfo("Background")->GetNumber() );
}
void TMVA::DataSet::InitSampling( Float_t fraction, Float_t weight, UInt_t seed )
{
   // create the random generator on first use
   if (fSamplingRandom == 0) fSamplingRandom = new TRandom3( seed );

   std::vector< std::pair< Float_t, Long64_t >* > evtList;

   Int_t treeIdx = TreeIndex( GetCurrentType() );

   if (fSamplingEventList.size() < UInt_t(treeIdx+1)) fSamplingEventList.resize(treeIdx+1);
   if (fSamplingSelected.size()  < UInt_t(treeIdx+1)) fSamplingSelected.resize(treeIdx+1);
   fSamplingEventList.at(treeIdx).clear();
   fSamplingSelected.at(treeIdx).clear();

   if (fSampling.size()        < UInt_t(treeIdx+1)) fSampling.resize(treeIdx+1);
   if (fSamplingNEvents.size() < UInt_t(treeIdx+1)) fSamplingNEvents.resize(treeIdx+1);
   if (fSamplingWeight.size()  < UInt_t(treeIdx+1)) fSamplingWeight.resize(treeIdx+1);

   // a fraction of (almost) 0 or 1 disables sampling for this tree
   if (fraction > 0.999999 || fraction < 0.0000001) {
      fSampling.at( treeIdx )        = false;
      fSamplingNEvents.at( treeIdx ) = 0;
      fSamplingWeight.at( treeIdx )  = 1.0;
      return;
   }

   fSampling.at( treeIdx )        = false;
   fSamplingNEvents.at( treeIdx ) = Int_t(fraction*GetNEvents());
   fSamplingWeight.at( treeIdx )  = weight;

   Long64_t nEvts = GetNEvents();
   fSamplingEventList.at( treeIdx ).reserve( nEvts );
   fSamplingSelected.at( treeIdx ).reserve( fSamplingNEvents.at(treeIdx) );

   // all events start with sampling weight 1
   for (Long64_t ievt=0; ievt<nEvts; ievt++) {
      std::pair<Float_t,Long64_t> p(1.0, ievt);
      fSamplingEventList.at( treeIdx ).push_back( p );
   }

   fSampling.at( treeIdx ) = true;
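
////////////////////////////////////////////////////////////////////////////////
/// draw the actual sample for the current tree type: random positions in the
/// cumulative event-weight range pick the events that go into
/// fSamplingSelected (weighted/importance sampling).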
void TMVA::DataSet::CreateSampling() const
{
   Int_t treeIdx = TreeIndex( GetCurrentType() );

   if (!fSampling.at(treeIdx)) return;

   if (fSamplingRandom == 0)
      Log() << kFATAL << Form("Dataset[%s] : ",fdsi->GetName())
            << "no random generator present for creating a random/importance sampling (initialized?)" << Endl;

   fSamplingSelected.at(treeIdx).clear();

   std::vector< std::pair< Float_t, Long64_t > > evtList;
   std::vector< std::pair< Float_t, Long64_t > >::iterator evtListIt;

   Float_t sumWeights = 0;

   evtList.assign( fSamplingEventList.at(treeIdx).begin(), fSamplingEventList.at(treeIdx).end() );

   for (evtListIt = evtList.begin(); evtListIt != evtList.end(); ++evtListIt) {
      sumWeights += (*evtListIt).first;
   }
   evtListIt = evtList.begin();

   // draw random positions in the cumulative weight range
   std::vector< Float_t > rnds;
   rnds.reserve(fSamplingNEvents.at(treeIdx));

   for (Int_t i = 0; i < fSamplingNEvents.at(treeIdx); i++) {
      Float_t pos = fSamplingRandom->Rndm()*sumWeights;
      rnds.push_back( pos );
   }

   // sort the random positions
   std::sort(rnds.begin(), rnds.end());

   // walk through the weighted event list and pick the events hit by the sorted random positions
   std::vector< Float_t >::iterator rndsIt = rnds.begin();
   Float_t runningSum = 0.000000001;
   for (evtListIt = evtList.begin(); evtListIt != evtList.end();) {
      runningSum += (*evtListIt).first;
      if (runningSum >= (*rndsIt)) {
         fSamplingSelected.at(treeIdx).push_back( (*evtListIt) );
         evtListIt = evtList.erase( evtListIt );
         ++rndsIt;
         if (rndsIt == rnds.end()) break;
      }
      else {
         ++evtListIt;
      }
   }
}
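
////////////////////////////////////////////////////////////////////////////////
/// feedback from a method during sampling: depending on whether the event(s)
/// were handled successfully, their sampling weights are decreased or
/// increased (capped at 1), so that problematic events are drawn more often.
/// evtNumber < 0 updates all events of the current tree.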
void TMVA::DataSet::EventResult( Bool_t successful, Long64_t evtNumber )
{
   if (!fSampling.at(fCurrentTreeIdx)) return;
   if (fSamplingWeight.at(fCurrentTreeIdx) > 0.99999999999) return;

   Long64_t start = 0;
   Long64_t stop  = fSamplingEventList.at(fCurrentTreeIdx).size() - 1;
   if (evtNumber >= 0) {
      start = stop = evtNumber;   // update only the given event
   }
   for (Long64_t iEvt = start; iEvt <= stop; iEvt++) {
      if (Long64_t(fSamplingEventList.at(fCurrentTreeIdx).size()) < iEvt) {
         Log() << kWARNING << Form("Dataset[%s] : ",fdsi->GetName())
               << "event number (" << iEvt << ") larger than number of sampled events ("
               << fSamplingEventList.at(fCurrentTreeIdx).size() << " of tree " << fCurrentTreeIdx << ")" << Endl;
      }
      Float_t weight = fSamplingEventList.at(fCurrentTreeIdx).at( iEvt ).first;
      if (!successful) {
         // increase the sampling weight of events the method got wrong, capped at 1
         weight /= fSamplingWeight.at(fCurrentTreeIdx);
         if (weight > 1.0) weight = 1.0;
      }
      else {
         weight *= fSamplingWeight.at(fCurrentTreeIdx);
      }
      fSamplingEventList.at(fCurrentTreeIdx).at( iEvt ).first = weight;
   }
}
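
////////////////////////////////////////////////////////////////////////////////
/// create a TTree ("TrainTree" or "TestTree") containing class id and name,
/// event weight, the input variables, targets and spectators, plus one branch
/// per registered result (method output). Sketch (hypothetical caller):
///    TTree* testTree = ds->GetTree( Types::kTesting );
///    testTree->Print();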
TTree* TMVA::DataSet::GetTree( Types::ETreeType type )
{
   Log() << kDEBUG << Form("Dataset[%s] : ",fdsi->GetName())
         << "GetTree(" << ( type==Types::kTraining ? "training" : "testing" ) << ")" << Endl;

   // only training and testing trees are supported
   if (type != Types::kTraining && type != Types::kTesting) return 0;

   Types::ETreeType savedType = GetCurrentType();

   SetCurrentType(type);
   const UInt_t t = TreeIndex(type);
   if (fResults.size() <= t) {
      Log() << kWARNING << Form("Dataset[%s] : ",fdsi->GetName())
            << "No results for treetype " << ( type==Types::kTraining ? "training" : "testing" )
            << " found. Size=" << fResults.size() << Endl;
   }

   // create the output tree
   TString treeName( (type == Types::kTraining ? "TrainTree" : "TestTree" ) );
   TTree *tree = new TTree(treeName, treeName);
   Float_t *varVals = new Float_t[fdsi->GetNVariables()];
   Float_t *tgtVals = new Float_t[fdsi->GetNTargets()];
   Float_t *visVals = new Float_t[fdsi->GetNSpectators()];

   // one buffer per registered result, large enough for multiclass and regression outputs
   Float_t **metVals = new Float_t*[fResults.at(t).size()];
   for (UInt_t i=0; i<fResults.at(t).size(); i++)
      metVals[i] = new Float_t[fdsi->GetNTargets()+fdsi->GetNClasses()];

   // event-wise information
   tree->Branch( "classID", &cls, "classID/I" );
   tree->Branch( "className", className, "className/C" );
   // create branches for the input variables; array variables get one branch per array
   Int_t ivar_array = 0;
   Int_t arraySize = -1;
   for (std::vector<VariableInfo>::const_iterator itVars = fdsi->GetVariableInfos().begin();
        itVars != fdsi->GetVariableInfos().end(); ++itVars) {

      if (!itVars->TestBit(DataSetInfo::kIsArrayVariable))
         tree->Branch( (*itVars).GetInternalName(), &varVals[n], (*itVars).GetInternalName()+TString("/F") );
      else {
         // only the first element of an array variable creates the branch
         if (ivar_array == 0) {
            TString name = (*itVars).GetInternalName();
            name.ReplaceAll("[0]", "");
            arraySize = fdsi->GetVarArraySize((*itVars).GetExpression());
            tree->Branch(name, &varVals[n], name + TString::Format("[%d]/F", arraySize));
            Log() << kDEBUG << "creating branch for array " << name << " with size " << arraySize << Endl;
         }
         ivar_array++;
         if (ivar_array == arraySize)
            ivar_array = 0;
      }
   }
   // create branches for the regression targets
   for (std::vector<VariableInfo>::const_iterator itTgts = fdsi->GetTargetInfos().begin();
        itTgts != fdsi->GetTargetInfos().end(); ++itTgts) {
      tree->Branch( (*itTgts).GetInternalName(), &tgtVals[n], (*itTgts).GetInternalName()+TString("/F") );
   }

   // create branches for the spectator variables
   for (std::vector<VariableInfo>::const_iterator itVis = fdsi->GetSpectatorInfos().begin();
        itVis != fdsi->GetSpectatorInfos().end(); ++itVis) {
      tree->Branch( (*itVis).GetInternalName(), &visVals[n], (*itVis).GetInternalName()+TString("/F") );
   }

   tree->Branch( "weight", &weight, "weight/F" );
   // create one branch per registered result (method output)
   for (std::map< TString, Results* >::iterator itMethod = fResults.at(t).begin();
        itMethod != fResults.at(t).end(); ++itMethod) {

      Log() << kDEBUG << Form("Dataset[%s] : ",fdsi->GetName())
            << "analysis type: " << (itMethod->second->GetAnalysisType()==Types::kRegression ? "Regression" :
                                     (itMethod->second->GetAnalysisType()==Types::kMulticlass ? "Multiclass" : "Classification" )) << Endl;

      if (itMethod->second->GetAnalysisType() == Types::kClassification) {
         // single discriminator value
         tree->Branch( itMethod->first, &(metVals[n][0]), itMethod->first + "/F" );
      }
      else if (itMethod->second->GetAnalysisType() == Types::kMulticlass) {
         // one leaf per class
         TString leafList("");
         for (UInt_t iCls = 0; iCls < fdsi->GetNClasses(); iCls++) {
            if (iCls > 0) leafList.Append( ":" );
            leafList.Append( fdsi->GetClassInfo( iCls )->GetName() );
            leafList.Append( "/F" );
         }
         Log() << kDEBUG << Form("Dataset[%s] : ",fdsi->GetName())
               << "itMethod->first " << itMethod->first << " LEAFLIST: " << leafList
               << " itMethod->second " << itMethod->second << Endl;
         tree->Branch( itMethod->first, (metVals[n]), leafList );
      }
      else if (itMethod->second->GetAnalysisType() == Types::kRegression) {
         // one leaf per regression target
         TString leafList("");
         for (UInt_t iTgt = 0; iTgt < fdsi->GetNTargets(); iTgt++) {
            if (iTgt > 0) leafList.Append( ":" );
            leafList.Append( fdsi->GetTargetInfo( iTgt ).GetInternalName() );
         }
         leafList.Append( "/F" );
         Log() << kDEBUG << Form("Dataset[%s] : ",fdsi->GetName())
               << "itMethod->first " << itMethod->first << " LEAFLIST: " << leafList
               << " itMethod->second " << itMethod->second << Endl;
         tree->Branch( itMethod->first, (metVals[n]), leafList );
      }
      else {
         Log() << kWARNING << Form("Dataset[%s] : ",fdsi->GetName())
               << "Unknown analysis type for result found when writing TestTree." << Endl;
      }
   }
   // check that each result holds exactly one entry per event
   for (auto && itMethod : fResults.at(t)) {
      auto numEvents = GetNEvents(type);
      auto results = itMethod.second;
      auto resultsName = itMethod.first;

      Long64_t numEventsResults = 0;
      auto analysisType = results->GetAnalysisType();
      if (analysisType == Types::kClassification) {
         numEventsResults = dynamic_cast<ResultsClassification *>(results)->GetSize();
      } else if (analysisType == Types::kMulticlass) {
         numEventsResults = dynamic_cast<ResultsMulticlass *>(results)->GetSize();
      } else if (analysisType == Types::kRegression) {
         numEventsResults = dynamic_cast<ResultsRegression *>(results)->GetSize();
      } else {
         Log() << kFATAL << "Unexpected analysisType." << Endl;
      }

      if (numEventsResults != numEvents) {
         Log() << kFATAL << "An error occurred in DataSet::GetTree. "
                            "Inconsistent size of result with name '" << resultsName << "'."
               << " Size is '" << std::to_string(numEventsResults) << "'"
               << " Expected '" << numEvents << "'." << Endl;
      }
   }
   // fill the tree event by event
   for (Long64_t iEvt = 0; iEvt < GetNEvents( type ); iEvt++) {
      const Event* ev = GetEvent( iEvt );

      cls    = ev->GetClass();
      weight = ev->GetWeight();
      strlcpy(className, fdsi->GetClassInfo( cls )->GetName(), sizeof(className));

      for (UInt_t ivar = 0; ivar < ev->GetNVariables();  ivar++) varVals[ivar] = ev->GetValue( ivar );
      for (UInt_t itgt = 0; itgt < ev->GetNTargets();    itgt++) tgtVals[itgt] = ev->GetTarget( itgt );
      for (UInt_t ivis = 0; ivis < ev->GetNSpectators(); ivis++) visVals[ivis] = ev->GetSpectator( ivis );

      // copy the method outputs for this event into the branch buffers
      UInt_t iMethod = 0;
      for (auto && itMethod : fResults.at(t)) {
         auto & results = *itMethod.second;
         auto analysisType = results.GetAnalysisType();
         auto const & vals = results[iEvt];

         if (analysisType == Types::kClassification) {
            metVals[iMethod][0] = vals[0];
         } else if (analysisType == Types::kMulticlass) {
            for (UInt_t nCls = 0; nCls < fdsi->GetNClasses(); nCls++) {
               Float_t val = vals.at(nCls);
               metVals[iMethod][nCls] = val;
            }
         } else if (analysisType == Types::kRegression) {
            for (UInt_t nTgts = 0; nTgts < fdsi->GetNTargets(); nTgts++) {
               Float_t val = vals.at(nTgts);
               metVals[iMethod][nTgts] = val;
            }
         }
         iMethod++;
      }
      tree->Fill();
   }

         << "Created tree '" << tree->GetName() << "' with " << tree->GetEntries() << " events" << Endl << Endl;

   SetCurrentType(savedType);

   for (UInt_t i=0; i<fResults.at(t).size(); i++)
      delete[] metVals[i];