126 #pragma warning ( disable : 4355 ) 143 fAnalysisType (
Types::kNoAnalysisType ),
144 fRegressionReturnVal ( 0 ),
145 fMulticlassReturnVal ( 0 ),
146 fDisableWriting (
kFALSE ),
147 fDataSetInfo ( dsi ),
148 fSignalReferenceCut ( 0.5 ),
149 fSignalReferenceCutOrientation( 1. ),
150 fVariableTransformType (
Types::kSignal ),
151 fJobName ( jobName ),
152 fMethodName ( methodTitle ),
153 fMethodType ( methodType ),
157 fConstructedFromWeightFile (
kFALSE ),
159 fMethodBaseDir ( theBaseDir ),
170 fSplTrainEffBvsS ( 0 ),
171 fVarTransformString (
"None" ),
172 fTransformationPointer ( 0 ),
173 fTransformation ( dsi, methodTitle ),
175 fVerbosityLevelString (
"Default" ),
178 fIgnoreNegWeightsInTraining(
kFALSE ),
180 fBackgroundClass ( 0 ),
277 for (
Int_t i = 0; i < 2; i++ ) {
370 if (
DataInfo().GetClassInfo(
"Signal") != 0) {
373 if (
DataInfo().GetClassInfo(
"Background") != 0) {
398 DeclareOptionRef(
fVerbose,
"V",
"Verbose output (short form of \"VerbosityLevel\" below - overrides the latter one)" );
414 DeclareOptionRef(
fVarTransformString,
"VarTransform",
"List of variable transformations performed before training, e.g., \"D_Background,P_Signal,G,N_AllClasses\" for: \"Decorrelation, PCA-transformation, Gaussianisation, Normalisation, each for the given class of events ('AllClasses' denotes all events of all classes, if no class indication is given, 'All' is assumed)\"" );
421 "Events with negative weights are ignored in the training (but are included for testing and performance evaluation)" );
472 Log() <<
kFATAL <<
"<ProcessOptions> Verbosity level type '" 486 TString trafoDefinition(trafoDefinitionIn);
487 if (trafoDefinition ==
"None")
return;
491 TString trafoDefinitionCheck(trafoDefinitionIn);
492 int npartial = 0, ntrafo=0;
493 for (
Int_t pos = 0, siz = trafoDefinition.
Sizeof(); pos < siz; ++pos) {
494 TString ch = trafoDefinition(pos,1);
495 if ( ch ==
"(" ) npartial++;
496 if ( ch ==
"+" || ch ==
",") ntrafo++;
499 log <<
kWARNING <<
"The use of multiple partial variable transformations during the application phase can be properly invoked via the \"Reader\", but it is not yet implemented in \"MakeClass\", the creation mechanism for standalone C++ application classes. The standalone C++ class produced by this training job is thus INCOMPLETE AND MUST NOT BE USED! The transformation in question is: " << trafoDefinitionIn <<
Endl;
507 Int_t parenthesisCount = 0;
508 for (
Int_t position = 0, size = trafoDefinition.
Sizeof(); position < size; ++position) {
509 TString ch = trafoDefinition(position,1);
510 if (ch ==
"(") ++parenthesisCount;
511 else if (ch ==
")") --parenthesisCount;
512 else if (ch ==
"," && parenthesisCount == 0) trafoDefinition.
Replace(position,1,
'+');
518 TString tdef = os->GetString();
524 Ssiz_t parLen = tdef.
Index(
")", parStart )-parStart+1;
526 variables = tdef(parStart,parLen);
527 tdef.
Remove(parStart,parLen);
528 variables.
Remove(parLen-1,1);
534 if (trClsList->
GetSize() < 1) log <<
kFATAL <<
"Incorrect transformation string provided." <<
Endl;
541 if (trCls !=
"AllClasses") {
544 log <<
kFATAL <<
"Class " << trCls <<
" not known for variable transformation " 545 << trName <<
", please check." <<
Endl;
552 if (trName ==
"I" || trName ==
"Ident" || trName ==
"Identity") {
553 if (variables.
Length() == 0) variables =
"_V_";
556 else if (trName ==
"D" || trName ==
"Deco" || trName ==
"Decorrelate") {
557 if (variables.
Length() == 0) variables =
"_V_";
560 else if (trName ==
"P" || trName ==
"PCA") {
561 if (variables.
Length() == 0) variables =
"_V_";
564 else if (trName ==
"U" || trName ==
"Uniform") {
565 if (variables.
Length() == 0) variables =
"_V_,_T_";
568 else if (trName ==
"G" || trName ==
"Gauss") {
569 if (variables.
Length() == 0) variables =
"_V_";
572 else if (trName ==
"N" || trName ==
"Norm" || trName ==
"Normalise" || trName ==
"Normalize") {
573 if (variables.
Length() == 0) variables =
"_V_,_T_";
576 else log <<
kFATAL <<
"<ProcessOptions> Variable transform '" 577 << trName <<
"' unknown." <<
Endl;
579 if (transformation) {
582 log <<
kINFO <<
"Create Transformation \"" << trName <<
"\" with reference class " 583 << clsInfo->
GetName() <<
"=("<< idxCls <<
")"<<
Endl;
585 log <<
kINFO <<
"Create Transformation \"" << trName <<
"\" with events from all classes." <<
Endl;
604 "Use signal or background events to derive for variable transformation (the transformation is applied on both types of, course)" );
607 DeclareOptionRef(
fTxtWeightsOnly=kTRUE,
"TxtWeightFilesOnly",
"If True: write all training results (weights) as text files (False: some are written in ROOT format)" );
632 Log() <<
kWARNING <<
"Parameter optimization is not yet implemented for method " 634 Log() <<
kWARNING <<
"Currently we need to set hardcoded which parameter is tuned in which ranges"<<
Endl;
636 std::map<TString,Double_t> tunedParameters;
637 tunedParameters.size();
638 return tunedParameters;
675 Log() <<
kINFO <<
"Elapsed time for training with " << nEvents <<
" events: " 678 Log() <<
kINFO <<
"Create MVA output for ";
682 Log() <<
"Multiclass classification on training sample" <<
Endl;
687 Log() <<
"classification on training sample" <<
Endl;
696 Log() <<
"regression on training sample" <<
Endl;
700 Log() <<
"Create PDFs" <<
Endl;
725 bool truncate =
false;
726 TH1F*
h1 = regRes->QuadraticDeviation( tgtNum , truncate, 1.);
731 TH1F* h2 = regRes->QuadraticDeviation( tgtNum , truncate, yq[0]);
756 regRes->Resize( nEvents );
760 regRes->SetValue( vals, ievt );
764 Log() <<
kINFO <<
"Elapsed time for evaluation of " << nEvents <<
" events: " 773 regRes->CreateDeviationHistograms( histNamePrefix );
786 if (!resMulticlass)
Log() <<
kFATAL<<
"unable to create pointer in AddMulticlassOutput, exiting."<<
Endl;
796 resMulticlass->
Resize( nEvents );
800 resMulticlass->SetValue( vals, ievt );
804 Log() <<
kINFO <<
"Elapsed time for evaluation of " << nEvents <<
" events: " 822 if (errUpper) *errUpper=-1;
865 << (type==
Types::kTraining?
"training":
"testing") <<
" sample (" << nEvents <<
" events)" <<
Endl;
874 if (modulo <= 0 ) modulo = 1;
878 Log() <<
kINFO <<
"Elapsed time for evaluation of " << nEvents <<
" events: " 905 mvaProb->
Resize( nEvents );
910 if (proba < 0)
break;
915 if (modulo <= 0 ) modulo = 1;
919 Log() <<
kINFO <<
"Elapsed time for evaluation of " << nEvents <<
" events: " 941 bias = 0; biasT = 0; dev = 0; devT = 0; rms = 0; rmsT = 0;
943 Double_t m1 = 0, m2 = 0, s1 = 0, s2 = 0, s12 = 0;
949 for (
Long64_t ievt=0; ievt<nevt; ievt++) {
973 m1 += t*w; s1 += t*t*w;
974 m2 += r*w; s2 += r*r*w;
987 corr = s12/sumw - m1*m2;
988 corr /=
TMath::Sqrt( (s1/sumw - m1*m1) * (s2/sumw - m2*m2) );
991 TH2F* hist =
new TH2F(
"hist",
"hist", 150, xmin, xmax, 100, xmin, xmax );
992 TH2F* histT =
new TH2F(
"histT",
"histT", 150, xmin, xmax, 100, xmin, xmax );
999 for (
Long64_t ievt=0; ievt<nevt; ievt++) {
1000 Float_t d = (rV[ievt] - tV[ievt]);
1001 hist->
Fill( rV[ievt], tV[ievt], wV[ievt] );
1002 if (d >= devMin && d <= devMax) {
1004 biasT += wV[ievt] * d;
1006 rmsT += wV[ievt] * d * d;
1007 histT->
Fill( rV[ievt], tV[ievt], wV[ievt] );
1035 if (!resMulticlass)
Log() <<
kFATAL<<
"unable to create pointer in TestMulticlass, exiting."<<
Endl;
1036 Log() <<
kINFO <<
"Determine optimal multiclass cuts for test data..." <<
Endl;
1058 <<
" not found in tree" <<
Endl;
1082 mvaRes->
Store(mva_s,
"MVA_S");
1083 mvaRes->
Store(mva_b,
"MVA_B");
1095 mvaRes->
Store(proba_s,
"Prob_S");
1096 mvaRes->
Store(proba_b,
"Prob_B");
1103 mvaRes->
Store(rarity_s,
"Rar_S");
1104 mvaRes->
Store(rarity_b,
"Rar_B");
1112 mvaRes->
Store(mva_eff_s,
"MVA_HIGHBIN_S");
1113 mvaRes->
Store(mva_eff_b,
"MVA_HIGHBIN_B");
1121 Log() <<
kINFO <<
"Loop over test events and fill histograms with classifier response..." <<
Endl;
1122 if (mvaProb)
Log() <<
kINFO <<
"Also filling probability and rarity histograms (on request)..." <<
Endl;
1130 mva_s ->
Fill( v, w );
1132 proba_s->
Fill( (*mvaProb)[ievt][0], w );
1136 mva_eff_s ->
Fill( v, w );
1139 mva_b ->
Fill( v, w );
1141 proba_b->
Fill( (*mvaProb)[ievt][0], w );
1144 mva_eff_b ->
Fill( v, w );
1174 tf << prefix <<
"#GEN -*-*-*-*-*-*-*-*-*-*-*- general info -*-*-*-*-*-*-*-*-*-*-*-" << std::endl << prefix << std::endl;
1176 tf.setf(std::ios::left);
1181 tf << prefix <<
"Creator : " << userInfo->
fUser << std::endl;
1190 tf << prefix << std::endl;
1195 tf << prefix << std::endl << prefix <<
"#OPT -*-*-*-*-*-*-*-*-*-*-*-*- options -*-*-*-*-*-*-*-*-*-*-*-*-" << std::endl << prefix << std::endl;
1197 tf << prefix << std::endl;
1200 tf << prefix << std::endl << prefix <<
"#VAR -*-*-*-*-*-*-*-*-*-*-*-* variables *-*-*-*-*-*-*-*-*-*-*-*-" << std::endl << prefix << std::endl;
1202 tf << prefix << std::endl;
1235 if (!parent)
return;
1313 Log() <<
kINFO <<
"Creating weight file in xml format: " 1333 Log() <<
kINFO <<
"Reading weight file: " 1337 #if ROOT_VERSION_CODE >= ROOT_VERSION(5,29,0) 1348 fb.open(tfname.
Data(),std::ios::in);
1349 if (!fb.is_open()) {
1350 Log() <<
kFATAL <<
"<ReadStateFromFile> " 1351 <<
"Unable to open input weight file: " << tfname <<
Endl;
1353 std::istream fin(&fb);
1360 Log() <<
kINFO <<
"Reading root weight file: " 1371 #if ROOT_VERSION_CODE >= ROOT_VERSION(5,26,00) 1377 Log() <<
kFATAL <<
"Method MethodBase::ReadStateFromXMLString( const char* xmlstr = " 1378 << xmlstr <<
" ) is not available for ROOT versions prior to 5.26/00." <<
Endl;
1404 if (nodeName==
"GeneralInfo") {
1409 while (antypeNode) {
1412 if (
name ==
"TrainingTime")
1415 if (
name ==
"AnalysisType") {
1421 else Log() <<
kFATAL <<
"Analysis type " << val <<
" is not known." <<
Endl;
1424 if (
name ==
"TMVA Release" ||
name ==
"TMVA") {
1431 if (
name ==
"ROOT Release" ||
name ==
"ROOT") {
1440 else if (nodeName==
"Options") {
1445 else if (nodeName==
"Variables") {
1448 else if (nodeName==
"Spectators") {
1451 else if (nodeName==
"Classes") {
1454 else if (nodeName==
"Targets") {
1457 else if (nodeName==
"Transformations") {
1460 else if (nodeName==
"MVAPdfs") {
1475 else if (nodeName==
"Weights") {
1479 Log() <<
kWARNING <<
"Unparsed XML node: '" << nodeName <<
"'" <<
Endl;
1506 methodType = methodType(methodType.Last(
' '),methodType.Length());
1511 if (methodName ==
"") methodName = methodType;
1536 fin.getline(buf,512);
1564 Log() <<
kFATAL <<
"<ProcessOptions> Variable transform '" 1569 fin.getline(buf,512);
1573 varTrafo->ReadTransformationFromStream(fin, trafo );
1584 fin.getline(buf,512);
1598 fin.getline(buf,512);
1600 fin.getline(buf,512);
1631 istr >> dummy >> readNVar;
1635 <<
" while there are " << readNVar <<
" variables declared in the file" 1650 Log() <<
kINFO <<
"ERROR in <ReadVarsFromStream>" <<
Endl;
1651 Log() <<
kINFO <<
"The definition (or the order) of the variables found in the input file is" <<
Endl;
1652 Log() <<
kINFO <<
"is not the same as the one declared in the Reader (which is necessary for" <<
Endl;
1653 Log() <<
kINFO <<
"the correct working of the method):" <<
Endl;
1654 Log() <<
kINFO <<
" var #" << varIdx <<
" declared in Reader: " << varIt->GetExpression() <<
Endl;
1656 Log() <<
kFATAL <<
"The expression declared to the Reader needs to be checked (name or order are wrong)" <<
Endl;
1710 for (
UInt_t iCls=0; iCls<nClasses; ++iCls) {
1746 <<
" while there are " << readNVar <<
" variables declared in the file" 1761 existingVarInfo = readVarInfo;
1764 Log() <<
kINFO <<
"ERROR in <ReadVariablesFromXML>" <<
Endl;
1765 Log() <<
kINFO <<
"The definition (or the order) of the variables found in the input file is" <<
Endl;
1766 Log() <<
kINFO <<
"not the same as the one declared in the Reader (which is necessary for the" <<
Endl;
1767 Log() <<
kINFO <<
"correct working of the method):" <<
Endl;
1770 Log() <<
kFATAL <<
"The expression declared to the Reader needs to be checked (name or order are wrong)" <<
Endl;
1786 <<
" while there are " << readNSpec <<
" spectators declared in the file" 1801 existingSpecInfo = readSpecInfo;
1804 Log() <<
kINFO <<
"ERROR in <ReadSpectatorsFromXML>" <<
Endl;
1805 Log() <<
kINFO <<
"The definition (or the order) of the spectators found in the input file is" <<
Endl;
1806 Log() <<
kINFO <<
"not the same as the one declared in the Reader (which is necessary for the" <<
Endl;
1807 Log() <<
kINFO <<
"correct working of the method):" <<
Endl;
1810 Log() <<
kFATAL <<
"The expression declared to the Reader needs to be checked (name or order are wrong)" <<
Endl;
1829 for (
UInt_t icls = 0; icls<readNCls;++icls) {
1846 if (
DataInfo().GetClassInfo(
"Signal") != 0) {
1851 if (
DataInfo().GetClassInfo(
"Background") != 0) {
1889 Log() <<
kFATAL <<
"MethodBase::BaseDir() - MethodBaseDir() return a NULL pointer!" <<
Endl;
1910 wfilePath.Write(
"TrainingPath" );
1911 wfileName.Write(
"WeightFileName" );
1937 Log()<<
kDEBUG<<
"Return from MethodBaseDir() after creating base directory "<<
Endl;
1969 return ( wFileDir + (wFileDir[wFileDir.
Length()-1]==
'/' ?
"" :
"/")
1996 Log() <<
kFATAL <<
"<WriteEvaluationHistosToFile> Unknown result: " 1998 <<
"/kMaxAnalysisType" <<
Endl;
2020 fin.getline(buf,512);
2026 std::stringstream s(code.
Data());
2034 std::stringstream s(code.
Data());
2042 std::stringstream s(code.
Data());
2043 std::string analysisType;
2048 else Log() <<
kFATAL <<
"Analysis type " << analysisType <<
" from weight-file not known!" << std::endl;
2050 Log() <<
kINFO <<
"Method was trained for " 2070 if (mvaRes==0 || mvaRes->
GetSize()==0) {
2071 Log() <<
kERROR<<
"<CreateMVAPdfs> No result of classifier testing available" <<
Endl;
2085 histMVAPdfS->Sumw2();
2086 histMVAPdfB->
Sumw2();
2094 else histMVAPdfB->
Fill( theVal, theWeight );
2101 histMVAPdfS->Write();
2102 histMVAPdfB->
Write();
2110 if (
DataInfo().GetNClasses() == 2) {
2112 <<
Form(
"<CreateMVAPdfs> Separation from histogram (PDF): %1.3f (%1.3f)",
2126 Log() <<
kINFO <<
"<GetProba> MVA PDFs for Signal and Background don't exist yet, we'll create them on demand" <<
Endl;
2132 return GetProba(mvaVal,sigFraction);
2141 Log() <<
kWARNING <<
"<GetProba> MVA PDFs for Signal and Background don't exist" <<
Endl;
2147 Double_t denom = p_s*ap_sig + p_b*(1 - ap_sig);
2149 return (denom > 0) ? (p_s*ap_sig) / denom : -1;
2160 Log() <<
kWARNING <<
"<GetRarity> Required MVA PDF for Signal or Backgroud does not exist: " 2161 <<
"select option \"CreateMVAPdfs\"" <<
Endl;
2185 if (!list || list->
GetSize() < 2) computeArea = kTRUE;
2186 else if (list->
GetSize() > 2) {
2187 Log() <<
kFATAL <<
"<GetEfficiency> Wrong number of arguments" 2188 <<
" in string: " << theString
2189 <<
" | required format, e.g., Efficiency:0.05, or empty string" <<
Endl;
2197 Log() <<
kFATAL <<
"<GetEfficiency> Binning mismatch between signal and background histos" <<
Endl;
2205 TH1 * effhist = results->
GetHist(
"MVA_HIGHBIN_S");
2212 if (results->
DoesExist(
"MVA_EFF_S")==0) {
2217 results->
Store(eff_s,
"MVA_EFF_S");
2218 results->
Store(eff_b,
"MVA_EFF_B");
2230 Float_t theVal = (*mvaRes)[ievt];
2233 TH1* theHist = isSignal ? eff_s : eff_b;
2236 if (isSignal) nevtS+=theWeight;
2240 if (sign > 0 && maxbin >
fNbinsH)
continue;
2241 if (sign < 0 && maxbin < 1 )
continue;
2242 if (sign > 0 && maxbin < 1 ) maxbin = 1;
2250 Log() <<
kFATAL <<
"<GetEfficiency> Mismatch in sign" <<
Endl;
2262 results->
Store(eff_BvsS,
"MVA_EFF_BvsS");
2268 results->
Store(rej_BvsS);
2270 rej_BvsS->
SetYTitle(
"Backgr rejection (1-eff)" );
2275 results->
Store(inveff_BvsS);
2277 inveff_BvsS->
SetYTitle(
"Inverse backgr. eff (1/eff)" );
2322 Double_t effS = 0., rejB, effS_ = 0., rejB_ = 0.;
2323 Int_t nbins_ = 5000;
2324 for (
Int_t bini=1; bini<=nbins_; bini++) {
2327 effS = (bini - 0.5)/
Float_t(nbins_);
2331 if ((effS - rejB)*(effS_ - rejB_) < 0)
break;
2349 Double_t effS = 0, effB = 0, effS_ = 0, effB_ = 0;
2350 Int_t nbins_ = 1000;
2356 for (
Int_t bini=1; bini<=nbins_; bini++) {
2359 effS = (bini - 0.5)/
Float_t(nbins_);
2361 integral += (1.0 - effB);
2375 for (
Int_t bini=1; bini<=nbins_; bini++) {
2378 effS = (bini - 0.5)/
Float_t(nbins_);
2382 if ((effB - effBref)*(effB_ - effBref) <= 0)
break;
2388 effS = 0.5*(effS + effS_);
2391 if (nevtS > 0) effSerr =
TMath::Sqrt( effS*(1.0 - effS)/nevtS );
2416 Log() <<
kFATAL <<
"<GetTrainingEfficiency> Wrong number of arguments" 2417 <<
" in string: " << theString
2418 <<
" | required format, e.g., Efficiency:0.05" <<
Endl;
2431 Log() <<
kFATAL <<
"<GetTrainingEfficiency> Binning mismatch between signal and background histos" 2439 TH1 * effhist = results->
GetHist(
"MVA_HIGHBIN_S");
2444 if (results->
DoesExist(
"MVA_TRAIN_S")==0) {
2452 results->
Store(mva_s_tr,
"MVA_TRAIN_S");
2453 results->
Store(mva_b_tr,
"MVA_TRAIN_B");
2462 results->
Store(mva_eff_tr_s,
"MVA_TRAINEFF_S");
2463 results->
Store(mva_eff_tr_b,
"MVA_TRAINEFF_B");
2480 theClsHist->
Fill( theVal, theWeight );
2484 if (sign > 0 && maxbin >
fNbinsH)
continue;
2485 if (sign < 0 && maxbin < 1 )
continue;
2486 if (sign > 0 && maxbin < 1 ) maxbin = 1;
2489 if (sign > 0)
for (
Int_t ibin=1; ibin<=maxbin; ibin++) theEffHist->
AddBinContent( ibin , theWeight );
2506 results->
Store(eff_bvss,
"EFF_BVSS_TR");
2507 results->
Store(rej_bvss,
"REJ_BVSS_TR");
2557 Double_t effS = 0., effB, effS_ = 0., effB_ = 0.;
2558 Int_t nbins_ = 1000;
2559 for (
Int_t bini=1; bini<=nbins_; bini++) {
2562 effS = (bini - 0.5)/
Float_t(nbins_);
2566 if ((effB - effBref)*(effB_ - effBref) <= 0)
break;
2571 return 0.5*(effS + effS_);
2581 if (!resMulticlass)
Log() <<
kFATAL<<
"unable to create pointer in GetMulticlassEfficiency, exiting."<<
Endl;
2593 if (!resMulticlass)
Log() <<
kFATAL<<
"unable to create pointer in GetMulticlassTrainingEfficiency, exiting."<<
Endl;
2595 Log() <<
kINFO <<
"Determine optimal multiclass cuts for training data..." <<
Endl;
2633 if ((!pdfS && pdfB) || (pdfS && !pdfB))
2634 Log() <<
kFATAL <<
"<GetSeparation> Mismatch in pdfs" <<
Endl;
2635 if (!pdfS) pdfS =
fSplS;
2636 if (!pdfB) pdfB =
fSplB;
2639 Log()<<
kWARNING<<
"could not calculate the separation, distributions" 2640 <<
" fSplS or fSplB are not yet filled" <<
Endl;
2655 if ((!histS && histB) || (histS && !histB))
2656 Log() <<
kFATAL <<
"<GetROCIntegral(TH1D*, TH1D*)> Mismatch in hists" <<
Endl;
2658 if (histS==0 || histB==0)
return 0.;
2671 for (
UInt_t i=0; i<nsteps; i++) {
2675 return integral*step;
2687 if ((!pdfS && pdfB) || (pdfS && !pdfB))
2688 Log() <<
kFATAL <<
"<GetSeparation> Mismatch in pdfs" <<
Endl;
2689 if (!pdfS) pdfS =
fSplS;
2690 if (!pdfB) pdfB =
fSplB;
2692 if (pdfS==0 || pdfB==0)
return 0.;
2701 for (
UInt_t i=0; i<nsteps; i++) {
2705 return integral*step;
2715 Double_t& max_significance_value )
const 2720 Double_t effS(0),effB(0),significance(0);
2723 if (SignalEvents <= 0 || BackgroundEvents <= 0) {
2724 Log() <<
kFATAL <<
"<GetMaximumSignificance> " 2725 <<
"Number of signal or background events is <= 0 ==> abort" 2729 Log() <<
kINFO <<
"Using ratio SignalEvents/BackgroundEvents = " 2730 << SignalEvents/BackgroundEvents <<
Endl;
2735 if ( (eff_s==0) || (eff_b==0) ) {
2746 significance =
sqrt(SignalEvents)*( effS )/
sqrt( effS + ( BackgroundEvents / SignalEvents) * effB );
2756 delete temp_histogram;
2758 Log() <<
kINFO <<
"Optimal cut at : " << max_significance <<
Endl;
2759 Log() <<
kINFO <<
"Maximum significance: " << max_significance_value <<
Endl;
2761 return max_significance;
2781 Log() <<
kFATAL <<
"<CalculateEstimator> Wrong tree type: " << treeType <<
Endl;
2800 for (
Int_t ievt = 0; ievt < entries; ievt++) {
2809 meanS += weight*theVar;
2810 rmsS += weight*theVar*theVar;
2814 meanB += weight*theVar;
2815 rmsB += weight*theVar*theVar;
2823 meanS = meanS/sumwS;
2824 meanB = meanB/sumwB;
2838 if (theClassFileName ==
"")
2841 classFileName = theClassFileName;
2845 TString tfname( classFileName );
2846 Log() <<
kINFO <<
"Creating standalone response class: " 2849 std::ofstream fout( classFileName );
2851 Log() <<
kFATAL <<
"<MakeClass> Unable to open file: " << classFileName <<
Endl;
2856 fout <<
"// Class: " << className << std::endl;
2857 fout <<
"// Automatically generated by MethodBase::MakeClass" << std::endl <<
"//" << std::endl;
2861 fout <<
"/* configuration options =====================================================" << std::endl << std::endl;
2864 fout <<
"============================================================================ */" << std::endl;
2867 fout <<
"" << std::endl;
2868 fout <<
"#include <vector>" << std::endl;
2869 fout <<
"#include <cmath>" << std::endl;
2870 fout <<
"#include <string>" << std::endl;
2871 fout <<
"#include <iostream>" << std::endl;
2872 fout <<
"" << std::endl;
2877 fout <<
"#ifndef IClassifierReader__def" << std::endl;
2878 fout <<
"#define IClassifierReader__def" << std::endl;
2880 fout <<
"class IClassifierReader {" << std::endl;
2882 fout <<
" public:" << std::endl;
2884 fout <<
" // constructor" << std::endl;
2885 fout <<
" IClassifierReader() : fStatusIsClean( true ) {}" << std::endl;
2886 fout <<
" virtual ~IClassifierReader() {}" << std::endl;
2888 fout <<
" // return classifier response" << std::endl;
2889 fout <<
" virtual double GetMvaValue( const std::vector<double>& inputValues ) const = 0;" << std::endl;
2891 fout <<
" // returns classifier status" << std::endl;
2892 fout <<
" bool IsStatusClean() const { return fStatusIsClean; }" << std::endl;
2894 fout <<
" protected:" << std::endl;
2896 fout <<
" bool fStatusIsClean;" << std::endl;
2897 fout <<
"};" << std::endl;
2899 fout <<
"#endif" << std::endl;
2901 fout <<
"class " << className <<
" : public IClassifierReader {" << std::endl;
2903 fout <<
" public:" << std::endl;
2905 fout <<
" // constructor" << std::endl;
2906 fout <<
" " << className <<
"( std::vector<std::string>& theInputVars ) " << std::endl;
2907 fout <<
" : IClassifierReader()," << std::endl;
2908 fout <<
" fClassName( \"" << className <<
"\" )," << std::endl;
2909 fout <<
" fNvars( " <<
GetNvar() <<
" )," << std::endl;
2910 fout <<
" fIsNormalised( " << (
IsNormalised() ?
"true" :
"false") <<
" )" << std::endl;
2911 fout <<
" { " << std::endl;
2912 fout <<
" // the training input variables" << std::endl;
2913 fout <<
" const char* inputVars[] = { ";
2916 if (ivar<
GetNvar()-1) fout <<
", ";
2918 fout <<
" };" << std::endl;
2920 fout <<
" // sanity checks" << std::endl;
2921 fout <<
" if (theInputVars.size() <= 0) {" << std::endl;
2922 fout <<
" std::cout << \"Problem in class \\\"\" << fClassName << \"\\\": empty input vector\" << std::endl;" << std::endl;
2923 fout <<
" fStatusIsClean = false;" << std::endl;
2924 fout <<
" }" << std::endl;
2926 fout <<
" if (theInputVars.size() != fNvars) {" << std::endl;
2927 fout <<
" std::cout << \"Problem in class \\\"\" << fClassName << \"\\\": mismatch in number of input values: \"" << std::endl;
2928 fout <<
" << theInputVars.size() << \" != \" << fNvars << std::endl;" << std::endl;
2929 fout <<
" fStatusIsClean = false;" << std::endl;
2930 fout <<
" }" << std::endl;
2932 fout <<
" // validate input variables" << std::endl;
2933 fout <<
" for (size_t ivar = 0; ivar < theInputVars.size(); ivar++) {" << std::endl;
2934 fout <<
" if (theInputVars[ivar] != inputVars[ivar]) {" << std::endl;
2935 fout <<
" std::cout << \"Problem in class \\\"\" << fClassName << \"\\\": mismatch in input variable names\" << std::endl" << std::endl;
2936 fout <<
" << \" for variable [\" << ivar << \"]: \" << theInputVars[ivar].c_str() << \" != \" << inputVars[ivar] << std::endl;" << std::endl;
2937 fout <<
" fStatusIsClean = false;" << std::endl;
2938 fout <<
" }" << std::endl;
2939 fout <<
" }" << std::endl;
2941 fout <<
" // initialize min and max vectors (for normalisation)" << std::endl;
2943 fout <<
" fVmin[" << ivar <<
"] = " << std::setprecision(15) <<
GetXmin( ivar ) <<
";" << std::endl;
2944 fout <<
" fVmax[" << ivar <<
"] = " << std::setprecision(15) <<
GetXmax( ivar ) <<
";" << std::endl;
2947 fout <<
" // initialize input variable types" << std::endl;
2952 fout <<
" // initialize constants" << std::endl;
2953 fout <<
" Initialize();" << std::endl;
2956 fout <<
" // initialize transformation" << std::endl;
2957 fout <<
" InitTransform();" << std::endl;
2959 fout <<
" }" << std::endl;
2961 fout <<
" // destructor" << std::endl;
2962 fout <<
" virtual ~" << className <<
"() {" << std::endl;
2963 fout <<
" Clear(); // method-specific" << std::endl;
2964 fout <<
" }" << std::endl;
2966 fout <<
" // the classifier response" << std::endl;
2967 fout <<
" // \"inputValues\" is a vector of input values in the same order as the " << std::endl;
2968 fout <<
" // variables given to the constructor" << std::endl;
2969 fout <<
" double GetMvaValue( const std::vector<double>& inputValues ) const;" << std::endl;
2971 fout <<
" private:" << std::endl;
2973 fout <<
" // method-specific destructor" << std::endl;
2974 fout <<
" void Clear();" << std::endl;
2977 fout <<
" // input variable transformation" << std::endl;
2979 fout <<
" void InitTransform();" << std::endl;
2980 fout <<
" void Transform( std::vector<double> & iv, int sigOrBgd ) const;" << std::endl;
2983 fout <<
" // common member variables" << std::endl;
2984 fout <<
" const char* fClassName;" << std::endl;
2986 fout <<
" const size_t fNvars;" << std::endl;
2987 fout <<
" size_t GetNvar() const { return fNvars; }" << std::endl;
2988 fout <<
" char GetType( int ivar ) const { return fType[ivar]; }" << std::endl;
2990 fout <<
" // normalisation of input variables" << std::endl;
2991 fout <<
" const bool fIsNormalised;" << std::endl;
2992 fout <<
" bool IsNormalised() const { return fIsNormalised; }" << std::endl;
2993 fout <<
" double fVmin[" <<
GetNvar() <<
"];" << std::endl;
2994 fout <<
" double fVmax[" <<
GetNvar() <<
"];" << std::endl;
2995 fout <<
" double NormVariable( double x, double xmin, double xmax ) const {" << std::endl;
2996 fout <<
" // normalise to output range: [-1, 1]" << std::endl;
2997 fout <<
" return 2*(x - xmin)/(xmax - xmin) - 1.0;" << std::endl;
2998 fout <<
" }" << std::endl;
3000 fout <<
" // type of input variable: 'F' or 'I'" << std::endl;
3001 fout <<
" char fType[" <<
GetNvar() <<
"];" << std::endl;
3003 fout <<
" // initialize internal variables" << std::endl;
3004 fout <<
" void Initialize();" << std::endl;
3005 fout <<
" double GetMvaValue__( const std::vector<double>& inputValues ) const;" << std::endl;
3006 fout <<
"" << std::endl;
3007 fout <<
" // private members (method specific)" << std::endl;
3012 fout <<
" inline double " << className <<
"::GetMvaValue( const std::vector<double>& inputValues ) const" << std::endl;
3013 fout <<
" {" << std::endl;
3014 fout <<
" // classifier response value" << std::endl;
3015 fout <<
" double retval = 0;" << std::endl;
3017 fout <<
" // classifier response, sanity check first" << std::endl;
3018 fout <<
" if (!IsStatusClean()) {" << std::endl;
3019 fout <<
" std::cout << \"Problem in class \\\"\" << fClassName << \"\\\": cannot return classifier response\"" << std::endl;
3020 fout <<
" << \" because status is dirty\" << std::endl;" << std::endl;
3021 fout <<
" retval = 0;" << std::endl;
3022 fout <<
" }" << std::endl;
3023 fout <<
" else {" << std::endl;
3024 fout <<
" if (IsNormalised()) {" << std::endl;
3025 fout <<
" // normalise variables" << std::endl;
3026 fout <<
" std::vector<double> iV;" << std::endl;
3027 fout <<
" iV.reserve(inputValues.size());" << std::endl;
3028 fout <<
" int ivar = 0;" << std::endl;
3029 fout <<
" for (std::vector<double>::const_iterator varIt = inputValues.begin();" << std::endl;
3030 fout <<
" varIt != inputValues.end(); varIt++, ivar++) {" << std::endl;
3031 fout <<
" iV.push_back(NormVariable( *varIt, fVmin[ivar], fVmax[ivar] ));" << std::endl;
3032 fout <<
" }" << std::endl;
3036 fout <<
" Transform( iV, -1 );" << std::endl;
3038 fout <<
" retval = GetMvaValue__( iV );" << std::endl;
3039 fout <<
" }" << std::endl;
3040 fout <<
" else {" << std::endl;
3044 fout <<
" std::vector<double> iV;" << std::endl;
3045 fout <<
" int ivar = 0;" << std::endl;
3046 fout <<
" for (std::vector<double>::const_iterator varIt = inputValues.begin();" << std::endl;
3047 fout <<
" varIt != inputValues.end(); varIt++, ivar++) {" << std::endl;
3048 fout <<
" iV.push_back(*varIt);" << std::endl;
3049 fout <<
" }" << std::endl;
3050 fout <<
" Transform( iV, -1 );" << std::endl;
3051 fout <<
" retval = GetMvaValue__( iV );" << std::endl;
3054 fout <<
" retval = GetMvaValue__( inputValues );" << std::endl;
3056 fout <<
" }" << std::endl;
3057 fout <<
" }" << std::endl;
3059 fout <<
" return retval;" << std::endl;
3060 fout <<
" }" << std::endl;
3076 std::streambuf* cout_sbuf = std::cout.rdbuf();
3077 std::ofstream* o = 0;
3078 if (
gConfig().WriteOptionsReference()) {
3084 std::cout.rdbuf( o->rdbuf() );
3091 <<
"================================================================" 3095 <<
"H e l p f o r M V A m e t h o d [ " <<
GetName() <<
" ] :" 3100 Log() <<
"Help for MVA method [ " <<
GetName() <<
" ] :" <<
Endl;
3108 Log() <<
"<Suppress this message by specifying \"!H\" in the booking option>" <<
Endl;
3110 <<
"================================================================" 3117 Log() <<
"# End of Message___" <<
Endl;
3120 std::cout.rdbuf( cout_sbuf );
3228 if (mvaRes !=
NULL) {
3231 TH1D *mva_s_tr =
dynamic_cast<TH1D*
> (mvaRes->
GetHist(
"MVA_TRAIN_S"));
3232 TH1D *mva_b_tr =
dynamic_cast<TH1D*
> (mvaRes->
GetHist(
"MVA_TRAIN_B"));
3234 if ( !mva_s || !mva_b || !mva_s_tr || !mva_b_tr)
return -1;
3236 if (SorB ==
's' || SorB ==
'S')
virtual void DeclareOptions()=0
Bool_t HasMVAPdfs() const
Types::EAnalysisType fAnalysisType
virtual Int_t Write(const char *name=0, Int_t option=0, Int_t bufsize=0)
Write this object to the current directory.
virtual void AddClassifierOutputProb(Types::ETreeType type)
prepare tree branch with the method's discriminating variable
virtual Int_t FindBin(Double_t x, Double_t y=0, Double_t z=0)
Return Global bin number corresponding to x,y,z.
virtual void Scale(Double_t c1=1, Option_t *option="")
Multiply this histogram by a constant c1.
virtual Int_t Fill(Double_t x)
Increment bin with abscissa X by 1.
void WriteStateToXML(void *parent) const
general method used in writing the header of the weight files where the used variables, variable transformation type etc.
virtual void SetTuneParameters(std::map< TString, Double_t > tuneParameters)
set the tuning parameters accoding to the argument This is just a dummy .
virtual void MakeClass(const TString &classFileName=TString("")) const
create reader class for method (classification only at present)
virtual Double_t GetMaximum(Double_t maxval=FLT_MAX) const
Return maximum value smaller than maxval of bins in the range, unless the value has been overridden b...
UInt_t GetNVariables() const
static TDirectory * RootBaseDir()
virtual Double_t GetBinCenter(Int_t bin) const
return bin center for 1D historam Better to use h1.GetXaxis().GetBinCenter(bin)
virtual const std::vector< Float_t > & GetMulticlassValues()
#define TMVA_VERSION_CODE
void SetCurrentEvent(Long64_t ievt) const
MsgLogger & Endl(MsgLogger &ml)
Bool_t GetLine(std::istream &fin, char *buf)
reads one line from the input stream checks for certain keywords and interprets the line if keywords ...
void ReadOptionsFromXML(void *node)
void ReadXML(void *pdfnode)
XML file reading.
void AddOutput(Types::ETreeType type, Types::EAnalysisType analysisType)
VariableInfo & AddTarget(const TString &expression, const TString &title, const TString &unit, Double_t min, Double_t max, Bool_t normalized=kTRUE, void *external=0)
add a variable (can be a complex expression) to the set of variables used in the MV analysis ...
static void CreateVariableTransforms(const TString &trafoDefinition, TMVA::DataSetInfo &dataInfo, TMVA::TransformationHandler &transformationHandler, TMVA::MsgLogger &log)
create variable transformations
Bool_t fIgnoreNegWeightsInTraining
virtual const char * WorkingDirectory()
Return working directory.
void ReadStateFromXML(void *parent)
std::vector< VariableInfo > & GetSpectatorInfos()
void WriteVarsToStream(std::ostream &tf, const TString &prefix="") const
write the list of variables (name, min, max) for a given data transformation method to the stream ...
virtual Double_t GetMvaValue(Double_t *errLower=0, Double_t *errUpper=0)=0
virtual void MakeClassSpecificHeader(std::ostream &, const TString &="") const
void variables(TString fin="TMVA.root", TString dirName="InputVariables_Id", TString title="TMVA Input Variables", Bool_t isRegression=kFALSE, Bool_t useTMVAStyle=kTRUE)
Collectable string class.
void ReadOptionsFromStream(std::istream &istr)
read option back from the weight file
TString & ReplaceAll(const TString &s1, const TString &s2)
TH1 * GetSmoothedHist() const
virtual const char * GetBuildNode() const
Return the build node name.
void BuildPDF(const TH1 *theHist)
virtual void WriteEvaluationHistosToFile(Types::ETreeType treetype)
writes all MVA evaluation histograms to file
const TString & GetOriginalVarName(Int_t ivar) const
virtual std::map< TString, Double_t > OptimizeTuningParameters(TString fomType="ROCIntegral", TString fitType="FitGA")
call the Optimizer with the set of parameters and ranges that are meant to be tuned.
TString fVariableTransformTypeString
XMLDocPointer_t NewDoc(const char *version="1.0")
creates new xml document with provided version
OptionBase * DeclareOptionRef(T &ref, const TString &name, const TString &desc="")
TransformationHandler * fTransformationPointer
Types::ESBType fVariableTransformType
virtual Double_t GetBinContent(Int_t bin) const
Return content of bin number bin.
A ROOT file is a suite of consecutive data records (TKey instances) with a well defined format...
virtual Double_t Eval(Double_t x) const
returns linearly interpolated TGraph entry around x
virtual Int_t GetEntries() const
virtual Int_t GetQuantiles(Int_t nprobSum, Double_t *q, const Double_t *probSum=0)
Compute Quantiles for this histogram Quantile x_q of a probability distribution Function F is defined...
virtual int MakeDirectory(const char *name)
Make a directory.
virtual TObject * FindObject(const char *name) const
Find object by name in the list of memory objects.
virtual TObject * Get(const char *namecycle)
Return pointer to object identified by namecycle.
virtual Double_t GetMean(Int_t axis=1) const
For axis = 1,2 or 3 returns the mean value of the histogram along X,Y or Z axis.
Ssiz_t Index(const char *pat, Ssiz_t i=0, ECaseCompare cmp=kExact) const
virtual const std::vector< Float_t > & GetRegressionValues()
const TString & GetReferenceFile() const
static Bool_t AddDirectoryStatus()
static function: cannot be inlined on Windows/NT
1-D histogram with a float per channel (see TH1 documentation)}
void SetTrainTime(Double_t trainTime)
TransformationHandler & GetTransformationHandler(Bool_t takeReroutedIfAvailable=true)
Short_t Min(Short_t a, Short_t b)
void ToLower()
Change string to lower-case.
virtual Double_t GetKSTrainingVsTest(Char_t SorB, TString opt="X")
virtual void SetYTitle(const char *title)
virtual TDirectory * mkdir(const char *name, const char *title="")
Create a sub-directory and return a pointer to the created directory.
MethodBase(const TString &jobName, Types::EMVA methodType, const TString &methodTitle, DataSetInfo &dsi, const TString &theOption="", TDirectory *theBaseDir=0)
standard constructor
virtual void TestMulticlass()
test multiclass classification
TString GetTrainingROOTVersionString() const
calculates the ROOT version string from the training version code on the fly
UInt_t GetNClasses() const
UInt_t GetNTargets() const
const std::vector< Event * > & GetEventCollection(Types::ETreeType type=Types::kMaxTreeType) const
std::vector< TString > * fInputVars
virtual void GetRegressionDeviation(UInt_t tgtNum, Types::ETreeType type, Double_t &stddev, Double_t &stddev90Percent) const
#define ROOT_VERSION_CODE
void ReadTargetsFromXML(void *tarnode)
read target info from XML
virtual Double_t GetMaximumSignificance(Double_t SignalEvents, Double_t BackgroundEvents, Double_t &optimal_significance_value) const
plot significance, S/Sqrt(S^2 + B^2), curve for given number of signal and background events; returns...
void AddInfoItem(void *gi, const TString &name, const TString &value) const
xml writing
virtual Double_t GetROCIntegral(TH1D *histS, TH1D *histB) const
calculate the area (integral) under the ROC curve as a overall quality measure of the classification ...
TDirectory * MethodBaseDir() const
returns the ROOT directory where all instances of the corresponding MVA method are stored ...
Double_t GetTrainingSumSignalWeights()
void FreeDoc(XMLDocPointer_t xmldoc)
frees allocated document data and deletes document itself
Double_t GetTrainTime() const
virtual Bool_t IsSignalLike()
uses a pre-set cut on the MVA output (SetSignalReferenceCut and SetSignalReferenceCutOrientation) for...
void CreateMVAPdfs()
Create PDFs of the MVA output variables.
static void AddDirectory(Bool_t add=kTRUE)
Sets the flag controlling the automatic add of histograms in memory.
TString & Replace(Ssiz_t pos, Ssiz_t n, const char *s)
void ReadVariablesFromXML(void *varnode)
read variable info from XML
const TString & GetExpression() const
static TFile * Open(const char *name, Option_t *option="", const char *ftitle="", Int_t compress=1, Int_t netopt=0)
Create / open a file.
const TString & GetWeightFileDir() const
void WriteStateToFile() const
write options and weights to file note that each one text file for the main configuration information...
static void SetIsTraining(Bool_t)
when this static function is called, it sets the flag whether events with negative event weight shoul...
TString GetTrainingTMVAVersionString() const
calculates the TMVA version string from the training version code on the fly
DataSetInfo & fDataSetInfo
set to true in order to suppress writing to XML
UInt_t TreeIndex(Types::ETreeType type) const
ECutOrientation fCutOrientation
const TString & GetName() const
virtual ~MethodBase()
destructor
void DocSetRootElement(XMLDocPointer_t xmldoc, XMLNodePointer_t xmlnode)
set main (root) node for document
UInt_t GetTrainingTMVAVersionCode() const
const Event * GetEvent() const
void ReadStateFromFile()
Function to write options and weights to file.
Types::ETreeType GetCurrentType() const
std::vector< Float_t > * GetValueVector()
virtual void AddClassifierOutput(Types::ETreeType type)
prepare tree branch with the method's discriminating variable
virtual Double_t GetRarity(Double_t mvaVal, Types::ESBType reftype=Types::kBackground) const
compute rarity: R(x) = Integrate_[-oo..x] { PDF(x') dx' } where PDF(x) is the PDF of the classifier's...
void PrintHelpMessage() const
prints out method-specific help method
void ReadClassesFromXML(void *clsnode)
read number of classes from XML
void ResetThisBase()
reset required for RootFinder
Bool_t EndsWith(const char *pat, ECaseCompare cmp=kExact) const
Return true if string ends with the specified string.
virtual void ParseOptions()
options parser
Double_t GetXmin(Int_t ivar) const
void SetupMethod()
setup of methods
DataSetInfo & DataInfo() const
void SetOptions(const TString &s)
virtual UserGroup_t * GetUserInfo(Int_t uid)
Returns all user info in the UserGroup_t structure.
Bool_t DoRegression() const
XMLDocPointer_t ParseString(const char *xmlstring)
parses content of string and tries to produce xml structures
void SetMinType(EMsgType minType)
Ssiz_t First(char c) const
Find first occurrence of a character c.
Bool_t DoesExist(const TString &alias) const
virtual void ProcessOptions()=0
virtual Double_t GetProba(const Event *ev)
virtual void AddBinContent(Int_t bin)
Increment bin content by 1.
Double_t GetWeight() const
return the event weight - depending on whether the flag IgnoreNegWeightsInTraining is or not...
Long64_t GetNTrainingEvents() const
std::vector< VariableInfo > & GetTargetInfos()
virtual Double_t GetEfficiency(const TString &, Types::ETreeType, Double_t &err)
fill background efficiency (resp.
virtual std::vector< Float_t > GetMulticlassEfficiency(std::vector< std::vector< Float_t > > &purity)
Double_t GetEffForRoot(Double_t)
returns efficiency as function of cut
Double_t Root(Double_t refValue)
Root finding using Brents algorithm; taken from CERNLIB function RZERO.
virtual void AddWeightsXMLTo(void *parent) const =0
UInt_t fTMVATrainingVersion
UInt_t GetNEvents() const
temporary event when testing on a different DataSet than the own one
TString GetElapsedTime(Bool_t Scientific=kTRUE)
Double_t GetXmax(Int_t ivar) const
TransformationHandler fTransformation
void ReadStateFromXMLString(const char *xmlstr)
for reading from memory
Bool_t DoMulticlass() const
virtual Double_t KolmogorovTest(const TH1 *h2, Option_t *option="") const
Statistical test of compatibility in shape between this histogram and h2, using Kolmogorov test...
virtual void MakeClassSpecific(std::ostream &, const TString &="") const
virtual void ReadWeightsFromXML(void *wghtnode)=0
Int_t GetHistNBins(Int_t evtNum=0)
void SaveDoc(XMLDocPointer_t xmldoc, const char *filename, Int_t layout=1)
store document content to file if layout<=0, no any spaces or newlines will be placed between xmlnode...
TString fWeightFileExtension
void * GetExternalLink() const
Float_t GetTarget(UInt_t itgt) const
void WriteStateToStream(std::ostream &tf) const
general method used in writing the header of the weight files where the used variables, variable transformation type etc.
Results * GetResults(const TString &, Types::ETreeType type, Types::EAnalysisType analysistype)
TString info(resultsName+"/"); switch(type) { case Types::kTraining: info += "kTraining/"; break; cas...
Double_t length(const TVector2 &v)
Class to manage histogram axis.
R__EXTERN TSystem * gSystem
TDirectory * fMethodBaseDir
UInt_t fROOTTrainingVersion
const char * GetName() const
Returns name of object.
ClassInfo * GetClassInfo(Int_t clNum) const
void ReadVarsFromStream(std::istream &istr)
Read the variables (name, min, max) for a given data transformation method from the stream...
void AddClassesXMLTo(void *parent) const
write class info to XML
const Int_t NBIN_HIST_HIGH
2-D histogram with a float per channel (see TH1 documentation)}
class TMVA::Config::VariablePlotting fVariablePlotting
void Statistics(Types::ETreeType treeType, const TString &theVarName, Double_t &, Double_t &, Double_t &, Double_t &, Double_t &, Double_t &)
calculates rms,mean, xmin, xmax of the event variable this can be either done for the variables as th...
virtual Bool_t InheritsFrom(const char *classname) const
Returns kTRUE if object inherits from class "classname".
Bool_t BeginsWith(const char *s, ECaseCompare cmp=kExact) const
virtual void SetBinContent(Int_t bin, Double_t content)
Set bin content see convention for numbering bins in TH1::GetBin In case the bin number is greater th...
Float_t GetAchievablePur(UInt_t cls)
void SetReadingVersion(UInt_t rv)
void SetValue(Float_t value, Int_t ievt)
set MVA response
UInt_t GetTrainingROOTVersionCode() const
Double_t ElapsedSeconds(void)
computes elapsed time in seconds
char * Form(const char *fmt,...)
void ReadFromXML(void *varnode)
read VariableInfo from stream
TSubString Strip(EStripType s=kTrailing, char c=' ') const
Return a substring of self stripped at beginning and/or end.
const TString & GetJobName() const
const TString & GetMethodName() const
UInt_t GetNSpectators(bool all=kTRUE) const
virtual Double_t Eval(Double_t x) const =0
TSpline * fSplTrainEffBvsS
virtual TObject * At(Int_t idx) const
Returns the object at position idx. Returns 0 if idx is out of range.
static MethodBase *& GetThisBaseThreadLocal()
void DeclareBaseOptions()
define the options (their key words) that can be set in the option string here the options valid for ...
1-D histogram with a double per channel (see TH1 documentation)}
virtual Double_t GetSignificance() const
compute significance of mean difference: significance = |&lt;S&gt; - &lt;B&gt;|/Sqrt(RMS_S^2 + RMS_B^2) ...
TString GetWeightFileName() const
retrieve weight file name
Double_t GetSignalReferenceCutOrientation() const
void ProcessBaseOptions()
the option string is decoded; for available options see "DeclareOptions"
Int_t FindVarIndex(const TString &) const
find variable by name
UInt_t GetNVariables() const
std::vector< const std::vector< TMVA::Event * > * > fEventCollections
void AddSpectatorsXMLTo(void *parent) const
write spectator info to XML
Float_t GetValue(UInt_t ivar) const
return value of i'th variable
TString fVerbosityLevelString
void DeclareOptions()
define the options (their key words) that can be set in the option string know options: PDFInterpol[i...
TList * GetStorage() const
TString & Remove(Ssiz_t pos)
static void SetIgnoreNegWeightsInTraining(Bool_t)
when this static function is called, it sets the flag whether events with negative event weight shoul...
XMLDocPointer_t ParseFile(const char *filename, Int_t maxbuf=100000)
Parses content of file and tries to produce xml structures.
const std::vector< TMVA::Event * > & GetEventCollection(Types::ETreeType type)
returns the event collection (i.e.
virtual void CheckSetup()
check may be overridden by derived class (sometimes, eg, fitters are used which can only be implement...
void SetTestTime(Double_t testTime)
virtual void AddRegressionOutput(Types::ETreeType type)
prepare tree branch with the method's discriminating variable
std::vector< Double_t > GetBestMultiClassCuts(UInt_t targetClass)
void SetWeightFileName(TString)
set the weight file name (deprecated)
virtual Double_t GetSeparation(TH1 *, TH1 *) const
compute "separation" defined as <s2> = (1/2) Int_-oo..+oo { (S(x) - B(x))^2/(S(x) + B(x)) dx } ...
Describe directory structure in memory.
std::vector< Float_t > * fMulticlassReturnVal
Bool_t IsNormalised() const
Double_t GetTrainingSumBackgrWeights()
TH1 * GetHist(const TString &alias) const
virtual Int_t Sizeof() const
Returns size string will occupy on I/O buffer.
static RooMathCoreReg dummy
virtual void GetHelpMessage() const =0
void SetCurrentType(Types::ETreeType type) const
void AddVarsXMLTo(void *parent) const
write variable info to XML
Bool_t Contains(const char *pat, ECaseCompare cmp=kExact) const
const Bool_t Use_Splines_for_Eff_
const char * AsString() const
Return the date & time as a string (ctime() format).
VariableInfo & GetVariableInfo(Int_t i)
void AddPreDefVal(const T &)
virtual void WriteMonitoringHistosToFile() const
write special monitoring histograms to file dummy implementation here --------------— ...
ClassInfo * AddClass(const TString &className)
void AddXMLTo(void *parent)
XML file writing.
Bool_t fConstructedFromWeightFile
void ProcessSetup()
process all options the "CheckForUnusedOptions" is done in an independent call, since it may be overr...
TString fVarTransformString
virtual void AddMulticlassOutput(Types::ETreeType type)
prepare tree branch with the method's discriminating variable
const TString & GetOptions() const
Mother of all ROOT objects.
void SetConfigName(const char *n)
void ValidatePDF(TH1 *original=0) const
comparison of original histogram with reference PDF
Float_t GetAchievableEff(UInt_t cls)
void SetSource(const std::string &source)
virtual std::vector< Float_t > GetMulticlassTrainingEfficiency(std::vector< std::vector< Float_t > > &purity)
virtual void SetXTitle(const char *title)
virtual void TestRegression(Double_t &bias, Double_t &biasT, Double_t &dev, Double_t &devT, Double_t &rms, Double_t &rmsT, Double_t &mInf, Double_t &mInfT, Double_t &corr, Types::ETreeType type)
calculate <sum-of-deviation-squared> of regression output versus "true" value from test sample ...
virtual Bool_t cd(const char *path=0)
Change current directory to "this" directory.
void ReadFromStream(std::istream &istr)
TDirectory * BaseDir() const
returns the ROOT directory where info/histograms etc of the corresponding MVA method instance are sto...
TString GetMethodTypeName() const
static MethodBase * GetThisBase()
return a pointer the base class of this method
virtual void DeclareCompatibilityOptions()
options that are used ONLY for the READER to ensure backward compatibility they are hence without any...
Short_t Max(Short_t a, Short_t b)
void AddToXML(void *varnode)
write class to XML
Double_t fSignalReferenceCut
the data set information (sometimes needed)
void SetWeightFileDir(TString fileDir)
set directory of weight file
XMLNodePointer_t DocGetRootElement(XMLDocPointer_t xmldoc)
returns root node of document
Double_t GetSignalReferenceCut() const
virtual void Sumw2(Bool_t flag=kTRUE)
Create structure to store sum of squares of weights.
A Graph is a graphics object made of two arrays X and Y with npoints each.
TH1 * GetOriginalHist() const
virtual TDirectory * GetDirectory(const char *namecycle, Bool_t printError=false, const char *funcname="GetDirectory")
Find a directory using apath.
Long64_t GetNEvents(Types::ETreeType type=Types::kMaxTreeType) const
virtual Double_t GetTrainingEfficiency(const TString &)
void AddOptionsXMLTo(void *parent) const
write options to XML file
Double_t GetIntegral(Double_t xmin, Double_t xmax)
computes PDF integral within given ranges
void ReadSpectatorsFromXML(void *specnode)
read spectator info from XML
Int_t Atoi() const
Return integer value of string.
Bool_t IsSignal(const Event *ev) const
void DrawProgressBar(Int_t, const TString &comment="")
draws progress bar in color or B&W caution:
ECutOrientation GetCutOrientation() const
void InitBase()
default initialization called by all constructors
std::vector< Float_t > * fRegressionReturnVal
Types::EAnalysisType GetAnalysisType() const
void AddTargetsXMLTo(void *parent) const
write target info to XML
void Store(TObject *obj, const char *alias=0)
virtual Int_t GetNbinsX() const
static Double_t IGetEffForRoot(Double_t)
interface for RootFinder
Double_t Sqrt(Double_t x)
const TString & GetTestvarName() const
virtual Int_t GetSize() const
virtual void ReadWeightsFromStream(std::istream &)=0
Int_t Fill(Double_t)
Invalid Fill method.
void SetTestvarName(const TString &v="")
double norm(double *x, double *p)
Types::EMVA GetMethodType() const
void CheckForUnusedOptions() const
checks for unused options in option string
virtual Int_t GetMaximumBin() const
Return location of bin with maximum value in the range.
virtual void TestClassification()
initialization
void ReadStateFromStream(std::istream &tf)
read the header from the weight files of the different MVA methods
virtual Int_t Write(const char *name=0, Int_t option=0, Int_t bufsize=0)
Write all objects in this collection.
const Event * GetEvent() const
std::vector< VariableInfo > & GetVariableInfos()
void SetExternalLink(void *p)
virtual void SetAnalysisType(Types::EAnalysisType type)
void Resize(Int_t entries)
void NoErrorCalc(Double_t *const err, Double_t *const errUpper)
void SetSignalReferenceCut(Double_t cut)
void WriteOptionsToStream(std::ostream &o, const TString &prefix) const
write options to output stream (e.g. in writing the MVA weight files
void Resize(Ssiz_t n)
Resize the string. Truncate or add blanks as necessary.
This class stores the date and time with a precision of one second in an unsigned 32 bit word (950130...
Double_t GetVal(Double_t x) const
returns value PDF(x)
void SetConfigDescription(const char *d)
virtual void Close(Option_t *option="")
Close a file.
const char * Data() const