ViewVC Help
View File | Revision Log | Show Annotations | Root Listing
root/cvsroot/UserCode/benhoob/HWW/Classify_HWW.C
Revision: 1.2
Committed: Mon Feb 14 12:39:14 2011 UTC (14 years, 3 months ago) by benhoob
Content type: text/plain
Branch: MAIN
Changes since 1.1: +14 -79 lines
Log Message:
Initial commit

File Contents

# Content
1 /**********************************************************************************
2 * Project : TMVA - a Root-integrated toolkit for multivariate data analysis *
3 * Package : TMVA *
4 * Executable: TMVAClassificationApplication *
5 * *
6 * This macro provides a simple example on how to use the trained classifiers *
7 * within an analysis module *
8 **********************************************************************************/
9
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <map>
#include <string>
#include <vector>

#include "TChain.h"
#include "TFile.h"
#include "TH1F.h"
#include "TROOT.h"
#include "TStopwatch.h"
#include "TString.h"
#include "TSystem.h"
#include "TTree.h"

#include "TMVAGui.C"

#if not defined(__CINT__) || defined(__MAKECINT__)
#include "TMVA/Tools.h"
#include "TMVA/Reader.h"
#include "TMVA/MethodCuts.h"
#endif
31
32 using namespace TMVA;
33
34 void Classify_HWW( TString myMethodList = "" )
35 {
36 #ifdef __CINT__
37 gROOT->ProcessLine( ".O0" ); // turn off optimization in CINT
38 #endif
39
40 //---------------------------------------------------------------
41
42 // This loads the library
43 TMVA::Tools::Instance();
44
45 // Default MVA methods to be trained + tested
46 std::map<std::string,int> Use;
47
48 // --- Cut optimisation
49 Use["Cuts"] = 1;
50 Use["CutsD"] = 1;
51 Use["CutsPCA"] = 0;
52 Use["CutsGA"] = 0;
53 Use["CutsSA"] = 0;
54 //
55 // --- 1-dimensional likelihood ("naive Bayes estimator")
56 Use["Likelihood"] = 1;
57 Use["LikelihoodD"] = 0; // the "D" extension indicates decorrelated input variables (see option strings)
58 Use["LikelihoodPCA"] = 1; // the "PCA" extension indicates PCA-transformed input variables (see option strings)
59 Use["LikelihoodKDE"] = 0;
60 Use["LikelihoodMIX"] = 0;
61 //
62 // --- Mutidimensional likelihood and Nearest-Neighbour methods
63 Use["PDERS"] = 1;
64 Use["PDERSD"] = 0;
65 Use["PDERSPCA"] = 0;
66 Use["PDEFoam"] = 1;
67 Use["PDEFoamBoost"] = 0; // uses generalised MVA method boosting
68 Use["KNN"] = 1; // k-nearest neighbour method
69 //
70 // --- Linear Discriminant Analysis
71 Use["LD"] = 1; // Linear Discriminant identical to Fisher
72 Use["Fisher"] = 0;
73 Use["FisherG"] = 0;
74 Use["BoostedFisher"] = 0; // uses generalised MVA method boosting
75 Use["HMatrix"] = 0;
76 //
77 // --- Function Discriminant analysis
78 Use["FDA_GA"] = 1; // minimisation of user-defined function using Genetics Algorithm
79 Use["FDA_SA"] = 0;
80 Use["FDA_MC"] = 0;
81 Use["FDA_MT"] = 0;
82 Use["FDA_GAMT"] = 0;
83 Use["FDA_MCMT"] = 0;
84 //
85 // --- Neural Networks (all are feed-forward Multilayer Perceptrons)
86 Use["MLP"] = 0; // Recommended ANN
87 Use["MLPBFGS"] = 0; // Recommended ANN with optional training method
88 Use["MLPBNN"] = 1; // Recommended ANN with BFGS training method and bayesian regulator
89 Use["CFMlpANN"] = 0; // Depreciated ANN from ALEPH
90 Use["TMlpANN"] = 0; // ROOT's own ANN
91 //
92 // --- Support Vector Machine
93 Use["SVM"] = 1;
94 //
95 // --- Boosted Decision Trees
96 Use["BDT"] = 1; // uses Adaptive Boost
97 Use["BDTG"] = 0; // uses Gradient Boost
98 Use["BDTB"] = 0; // uses Bagging
99 Use["BDTD"] = 0; // decorrelation + Adaptive Boost
100 //
101 // --- Friedman's RuleFit method, ie, an optimised series of cuts ("rules")
102 Use["RuleFit"] = 1;
103 // ---------------------------------------------------------------
104 Use["Plugin"] = 0;
105 Use["Category"] = 0;
106 Use["SVM_Gauss"] = 0;
107 Use["SVM_Poly"] = 0;
108 Use["SVM_Lin"] = 0;
109
110 std::cout << std::endl;
111 std::cout << "==> Start TMVAClassificationApplication" << std::endl;
112
113 // Select methods (don't look at this code - not of interest)
114 if (myMethodList != "") {
115 for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;
116
117 std::vector<TString> mlist = gTools().SplitString( myMethodList, ',' );
118 for (UInt_t i=0; i<mlist.size(); i++) {
119 std::string regMethod(mlist[i]);
120
121 if (Use.find(regMethod) == Use.end()) {
122 std::cout << "Method \"" << regMethod
123 << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
124 for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
125 std::cout << it->first << " ";
126 }
127 std::cout << std::endl;
128 return;
129 }
130 Use[regMethod] = 1;
131 }
132 }
133
134 // --------------------------------------------------------------------------------------------------
135
136 //-----------------------------------
137 //select samples to run over
138 //-----------------------------------
139
140 vector<char*> samples;
141 samples.push_back("WWTo2L2Nu");
142 samples.push_back("GluGluToWWTo4L");
143 samples.push_back("WZ");
144 samples.push_back("ZZ");
145 samples.push_back("TTJets");
146 samples.push_back("tW");
147 samples.push_back("WJetsToLNu");
148 samples.push_back("DY");
149 samples.push_back("Higgs130");
150 // samples.push_back("Higgs160");
151 // samples.push_back("Higgs200");
152
153 const unsigned int nsamples = samples.size();
154
155 for( unsigned int i = 0 ; i < nsamples ; ++i ){
156
157 // --- Create the Reader object
158
159 TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );
160
161 // Create a set of variables and declare them to the reader
162 // - the variable names MUST corresponds in name and type to those given in the weight file(s) used
163 // Float_t var1, var2;
164 // Float_t var3, var4;
165 // reader->AddVariable( "myvar1 := var1+var2", &var1 );
166 // reader->AddVariable( "myvar2 := var1-var2", &var2 );
167 // reader->AddVariable( "var3", &var3 );
168 // reader->AddVariable( "var4", &var4 );
169
170 Float_t lephard_pt;
171 Float_t lepsoft_pt;
172 Float_t dil_dphi;
173 Float_t dil_mass;
174
175 reader->AddVariable( "lephard_pt" , &lephard_pt );
176 reader->AddVariable( "lepsoft_pt" , &lepsoft_pt );
177 reader->AddVariable( "dil_dphi" , &dil_dphi );
178 reader->AddVariable( "dil_mass" , &dil_mass );
179
180 // Spectator variables declared in the training have to be added to the reader, too
181 // Float_t spec1,spec2;
182 // reader->AddSpectator( "spec1 := var1*2", &spec1 );
183 // reader->AddSpectator( "spec2 := var1*3", &spec2 );
184
185 Float_t Category_cat1, Category_cat2, Category_cat3;
186 if (Use["Category"]){
187 // Add artificial spectators for distinguishing categories
188 // reader->AddSpectator( "Category_cat1 := var3<=0", &Category_cat1 );
189 // reader->AddSpectator( "Category_cat2 := (var3>0)&&(var4<0)", &Category_cat2 );
190 // reader->AddSpectator( "Category_cat3 := (var3>0)&&(var4>=0)", &Category_cat3 );
191 }
192
193 // --- Book the MVA methods
194
195 //TString dir = "weights/";
196 //TString path = "Trainings/H130_WWTo2L2Nu/";
197 //TString path = "Trainings/H130_WWTo2L2Nu_WJetsToLNu/";
198 TString path = "Trainings/H130_allbkg/";
199 TString dir = path + "weights/";
200 TString outdir = path + "output/";
201
202 TString prefix = "TMVAClassification";
203
204 // Book method(s)
205 for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
206 if (it->second) {
207 TString methodName = TString(it->first) + TString(" method");
208 TString weightfile = dir + prefix + TString("_") + TString(it->first) + TString(".weights.xml");
209 reader->BookMVA( methodName, weightfile );
210 }
211 }
212
213 // Book output histograms
214 UInt_t nbin = 1000;
215 TH1F *histLk(0), *histLkD(0), *histLkPCA(0), *histLkKDE(0), *histLkMIX(0), *histPD(0), *histPDD(0);
216 TH1F *histPDPCA(0), *histPDEFoam(0), *histPDEFoamErr(0), *histPDEFoamSig(0), *histKNN(0), *histHm(0);
217 TH1F *histFi(0), *histFiG(0), *histFiB(0), *histLD(0), *histNn(0),*histNnbfgs(0),*histNnbnn(0);
218 TH1F *histNnC(0), *histNnT(0), *histBdt(0), *histBdtG(0), *histBdtD(0), *histRf(0), *histSVMG(0);
219 TH1F *histSVMP(0), *histSVML(0), *histFDAMT(0), *histFDAGA(0), *histCat(0), *histPBdt(0);
220
221 if (Use["Likelihood"]) histLk = new TH1F( "MVA_Likelihood", "MVA_Likelihood", nbin, -1, 1 );
222 if (Use["LikelihoodD"]) histLkD = new TH1F( "MVA_LikelihoodD", "MVA_LikelihoodD", nbin, -1, 0.9999 );
223 if (Use["LikelihoodPCA"]) histLkPCA = new TH1F( "MVA_LikelihoodPCA", "MVA_LikelihoodPCA", nbin, -1, 1 );
224 if (Use["LikelihoodKDE"]) histLkKDE = new TH1F( "MVA_LikelihoodKDE", "MVA_LikelihoodKDE", nbin, -0.00001, 0.99999 );
225 if (Use["LikelihoodMIX"]) histLkMIX = new TH1F( "MVA_LikelihoodMIX", "MVA_LikelihoodMIX", nbin, 0, 1 );
226 if (Use["PDERS"]) histPD = new TH1F( "MVA_PDERS", "MVA_PDERS", nbin, 0, 1 );
227 if (Use["PDERSD"]) histPDD = new TH1F( "MVA_PDERSD", "MVA_PDERSD", nbin, 0, 1 );
228 if (Use["PDERSPCA"]) histPDPCA = new TH1F( "MVA_PDERSPCA", "MVA_PDERSPCA", nbin, 0, 1 );
229 if (Use["KNN"]) histKNN = new TH1F( "MVA_KNN", "MVA_KNN", nbin, 0, 1 );
230 if (Use["HMatrix"]) histHm = new TH1F( "MVA_HMatrix", "MVA_HMatrix", nbin, -0.95, 1.55 );
231 if (Use["Fisher"]) histFi = new TH1F( "MVA_Fisher", "MVA_Fisher", nbin, -4, 4 );
232 if (Use["FisherG"]) histFiG = new TH1F( "MVA_FisherG", "MVA_FisherG", nbin, -1, 1 );
233 if (Use["BoostedFisher"]) histFiB = new TH1F( "MVA_BoostedFisher", "MVA_BoostedFisher", nbin, -2, 2 );
234 if (Use["LD"]) histLD = new TH1F( "MVA_LD", "MVA_LD", nbin, -2, 2 );
235 if (Use["MLP"]) histNn = new TH1F( "MVA_MLP", "MVA_MLP", nbin, -1.25, 1.5 );
236 if (Use["MLPBFGS"]) histNnbfgs = new TH1F( "MVA_MLPBFGS", "MVA_MLPBFGS", nbin, -1.25, 1.5 );
237 if (Use["MLPBNN"]) histNnbnn = new TH1F( "MVA_MLPBNN", "MVA_MLPBNN", nbin, -1.25, 1.5 );
238 if (Use["CFMlpANN"]) histNnC = new TH1F( "MVA_CFMlpANN", "MVA_CFMlpANN", nbin, 0, 1 );
239 if (Use["TMlpANN"]) histNnT = new TH1F( "MVA_TMlpANN", "MVA_TMlpANN", nbin, -1.3, 1.3 );
240 if (Use["BDT"]) histBdt = new TH1F( "MVA_BDT", "MVA_BDT", nbin, -0.8, 0.8 );
241 if (Use["BDTD"]) histBdtD = new TH1F( "MVA_BDTD", "MVA_BDTD", nbin, -0.8, 0.8 );
242 if (Use["BDTG"]) histBdtG = new TH1F( "MVA_BDTG", "MVA_BDTG", nbin, -1.0, 1.0 );
243 if (Use["RuleFit"]) histRf = new TH1F( "MVA_RuleFit", "MVA_RuleFit", nbin, -2.0, 2.0 );
244 if (Use["SVM_Gauss"]) histSVMG = new TH1F( "MVA_SVM_Gauss", "MVA_SVM_Gauss", nbin, 0.0, 1.0 );
245 if (Use["SVM_Poly"]) histSVMP = new TH1F( "MVA_SVM_Poly", "MVA_SVM_Poly", nbin, 0.0, 1.0 );
246 if (Use["SVM_Lin"]) histSVML = new TH1F( "MVA_SVM_Lin", "MVA_SVM_Lin", nbin, 0.0, 1.0 );
247 if (Use["FDA_MT"]) histFDAMT = new TH1F( "MVA_FDA_MT", "MVA_FDA_MT", nbin, -2.0, 3.0 );
248 if (Use["FDA_GA"]) histFDAGA = new TH1F( "MVA_FDA_GA", "MVA_FDA_GA", nbin, -2.0, 3.0 );
249 if (Use["Category"]) histCat = new TH1F( "MVA_Category", "MVA_Category", nbin, -2., 2. );
250 if (Use["Plugin"]) histPBdt = new TH1F( "MVA_PBDT", "MVA_BDT", nbin, -0.8, 0.8 );
251
252 // PDEFoam also returns per-event error, fill in histogram, and also fill significance
253 if (Use["PDEFoam"]) {
254 histPDEFoam = new TH1F( "MVA_PDEFoam", "MVA_PDEFoam", nbin, 0, 1 );
255 histPDEFoamErr = new TH1F( "MVA_PDEFoamErr", "MVA_PDEFoam error", nbin, 0, 1 );
256 histPDEFoamSig = new TH1F( "MVA_PDEFoamSig", "MVA_PDEFoam significance", nbin, 0, 10 );
257 }
258
259 // Book example histogram for probability (the other methods are done similarly)
260 TH1F *probHistFi(0), *rarityHistFi(0);
261 if (Use["Fisher"]) {
262 probHistFi = new TH1F( "MVA_Fisher_Proba", "MVA_Fisher_Proba", nbin, 0, 1 );
263 rarityHistFi = new TH1F( "MVA_Fisher_Rarity", "MVA_Fisher_Rarity", nbin, 0, 1 );
264 }
265
266 // Prepare input tree (this must be replaced by your data source)
267 // in this example, there is a toy tree with signal and one with background events
268 // we'll later on use only the "signal" events for the test in this example.
269 //
270
271 char* iter = "v2";
272 TChain *ch = new TChain("Events");
273
274 if( strcmp( samples.at(i) , "DY" ) == 0 ){
275 ch -> Add( Form("babies/%s/DYToMuMuM20_PU_testFinal_baby.root",iter) );
276 ch -> Add( Form("babies/%s/DYToMuMuM10To20_PU_testFinal_baby.root",iter) );
277 ch -> Add( Form("babies/%s/DYToEEM20_PU_testFinal_baby.root",iter) );
278 ch -> Add( Form("babies/%s/DYToEEM10To20_PU_testFinal_baby.root",iter) );
279 ch -> Add( Form("babies/%s/DYToTauTauM20_PU_testFinal_baby.root",iter) );
280 ch -> Add( Form("babies/%s/DYToTauTauM10To20_PU_testFinal_baby.root",iter) );
281 }
282 else if( strcmp( samples.at(i) , "Higgs130" ) == 0 ){
283 ch -> Add( Form("babies/%s/HToWWTo2L2NuM130_PU_testFinal_baby.root",iter) );
284 ch -> Add( Form("babies/%s/HToWWToLNuTauNuM130_PU_testFinal_baby.root",iter) );
285 ch -> Add( Form("babies/%s/HToWWTo2Tau2NuM130_PU_testFinal_baby.root",iter) );
286 }
287 else if( strcmp( samples.at(i) , "Higgs160" ) == 0 ){
288 ch -> Add( Form("babies/%s/HToWWTo2L2NuM160_PU_testFinal_baby.root",iter) );
289 ch -> Add( Form("babies/%s/HToWWToLNuTauNuM160_PU_testFinal_baby.root",iter) );
290 ch -> Add( Form("babies/%s/HToWWTo2Tau2NuM160_PU_testFinal_baby.root",iter) );
291 }
292 else if( strcmp( samples.at(i) , "Higgs200" ) == 0 ){
293 ch -> Add( Form("babies/%s/HToWWTo2L2NuM200_PU_testFinal_baby.root",iter) );
294 ch -> Add( Form("babies/%s/HToWWToLNuTauNuM200_PU_testFinal_baby.root",iter) );
295 ch -> Add( Form("babies/%s/HToWWTo2Tau2NuM200_PU_testFinal_baby.root",iter) );
296 }
297 else{
298 ch -> Add( Form("babies/%s/%s_PU_testFinal_baby.root",iter,samples.at(i)) );
299 }
300
301
302
303 // --- Event loop
304
305 // Prepare the event tree
306 // - here the variable names have to corresponds to your tree
307 // - you can use the same variables as above which is slightly faster,
308 // but of course you can use different ones and copy the values inside the event loop
309 //
310
311 TTree *theTree = (TTree*) ch;
312
313 std::cout << "--- Using input files: -------------------" << std::endl;
314
315 TObjArray *listOfFiles = ch->GetListOfFiles();
316 TIter fileIter(listOfFiles);
317 TChainElement* currentFile = 0;
318
319 while((currentFile = (TChainElement*)fileIter.Next())) {
320 std::cout << currentFile->GetTitle() << std::endl;
321 }
322
323 Float_t lephard_pt_;
324 Float_t lepsoft_pt_;
325 Float_t dil_dphi_;
326 Float_t dil_mass_;
327 Int_t event_type_;
328 Float_t met_projpt_;
329 Int_t jets_num_;
330 Int_t extralep_num_;
331 Int_t lowptbtags_num_;
332 Int_t softmu_num_;
333 Float_t event_scale1fb_;
334
335 theTree->SetBranchAddress( "lephard_pt_" , &lephard_pt_ );
336 theTree->SetBranchAddress( "lepsoft_pt_" , &lepsoft_pt_ );
337 theTree->SetBranchAddress( "dil_dphi_" , &dil_dphi_ );
338 theTree->SetBranchAddress( "dil_mass_" , &dil_mass_ );
339 theTree->SetBranchAddress( "event_type_" , &event_type_ );
340 theTree->SetBranchAddress( "met_projpt_" , &met_projpt_ );
341 theTree->SetBranchAddress( "jets_num_" , &jets_num_ );
342 theTree->SetBranchAddress( "extralep_num_" , &extralep_num_ );
343 theTree->SetBranchAddress( "lowptbtags_num_" , &lowptbtags_num_ );
344 theTree->SetBranchAddress( "softmu_num_" , &softmu_num_ );
345 theTree->SetBranchAddress( "event_scale1fb_" , &event_scale1fb_ );
346
347 // Efficiency calculator for cut method
348 Int_t nSelCutsGA = 0;
349 Double_t effS = 0.7;
350
351 std::vector<Float_t> vecVar(4); // vector for EvaluateMVA tests
352
353 std::cout << "--- Processing: " << theTree->GetEntries() << " events" << std::endl;
354 TStopwatch sw;
355 sw.Start();
356
357 int npass = 0;
358
359 for (Long64_t ievt=0; ievt<theTree->GetEntries();ievt++) {
360
361 if (ievt%1000 == 0) std::cout << "--- ... Processing event: " << ievt << std::endl;
362
363 theTree->GetEntry(ievt);
364
365 if( event_type_ == 2 && met_projpt_ < 20. ) continue;
366 if( event_type_ != 2 && met_projpt_ < 35. ) continue;
367 if( lephard_pt_ < 20. ) continue;
368 //if( lepsoft_pt_ < 10. ) continue;
369 if( lepsoft_pt_ < 20. ) continue;
370 if( jets_num_ > 0 ) continue;
371 if( extralep_num_ > 0 ) continue;
372 if( lowptbtags_num_ > 0 ) continue;
373 if( softmu_num_ > 0 ) continue;
374 if( dil_mass_ < 12. ) continue;
375
376 float weight = event_scale1fb_ * 0.0355;
377
378 lephard_pt = lephard_pt_;
379 lepsoft_pt = lepsoft_pt_;
380 dil_mass = dil_mass_;
381 dil_dphi = dil_dphi_;
382
383 npass++;
384 // var1 = userVar1 + userVar2;
385 // var2 = userVar1 - userVar2;
386
387 // --- Return the MVA outputs and fill into histograms
388
389 if (Use["CutsGA"]) {
390 // Cuts is a special case: give the desired signal efficienciy
391 Bool_t passed = reader->EvaluateMVA( "CutsGA method", effS );
392 if (passed) nSelCutsGA++;
393 }
394
395 if (Use["Likelihood" ]) histLk ->Fill( reader->EvaluateMVA( "Likelihood method" ) , weight);
396 if (Use["LikelihoodD" ]) histLkD ->Fill( reader->EvaluateMVA( "LikelihoodD method" ) , weight);
397 if (Use["LikelihoodPCA"]) histLkPCA ->Fill( reader->EvaluateMVA( "LikelihoodPCA method" ) , weight);
398 if (Use["LikelihoodKDE"]) histLkKDE ->Fill( reader->EvaluateMVA( "LikelihoodKDE method" ) , weight);
399 if (Use["LikelihoodMIX"]) histLkMIX ->Fill( reader->EvaluateMVA( "LikelihoodMIX method" ) , weight);
400 if (Use["PDERS" ]) histPD ->Fill( reader->EvaluateMVA( "PDERS method" ) , weight);
401 if (Use["PDERSD" ]) histPDD ->Fill( reader->EvaluateMVA( "PDERSD method" ) , weight);
402 if (Use["PDERSPCA" ]) histPDPCA ->Fill( reader->EvaluateMVA( "PDERSPCA method" ) , weight);
403 if (Use["KNN" ]) histKNN ->Fill( reader->EvaluateMVA( "KNN method" ) , weight);
404 if (Use["HMatrix" ]) histHm ->Fill( reader->EvaluateMVA( "HMatrix method" ) , weight);
405 if (Use["Fisher" ]) histFi ->Fill( reader->EvaluateMVA( "Fisher method" ) , weight);
406 if (Use["FisherG" ]) histFiG ->Fill( reader->EvaluateMVA( "FisherG method" ) , weight);
407 if (Use["BoostedFisher"]) histFiB ->Fill( reader->EvaluateMVA( "BoostedFisher method" ) , weight);
408 if (Use["LD" ]) histLD ->Fill( reader->EvaluateMVA( "LD method" ) , weight);
409 if (Use["MLP" ]) histNn ->Fill( reader->EvaluateMVA( "MLP method" ) , weight);
410 if (Use["MLPBFGS" ]) histNnbfgs ->Fill( reader->EvaluateMVA( "MLPBFGS method" ) , weight);
411 if (Use["MLPBNN" ]) histNnbnn ->Fill( reader->EvaluateMVA( "MLPBNN method" ) , weight);
412 if (Use["CFMlpANN" ]) histNnC ->Fill( reader->EvaluateMVA( "CFMlpANN method" ) , weight);
413 if (Use["TMlpANN" ]) histNnT ->Fill( reader->EvaluateMVA( "TMlpANN method" ) , weight);
414 if (Use["BDT" ]) histBdt ->Fill( reader->EvaluateMVA( "BDT method" ) , weight);
415 if (Use["BDTD" ]) histBdtD ->Fill( reader->EvaluateMVA( "BDTD method" ) , weight);
416 if (Use["BDTG" ]) histBdtG ->Fill( reader->EvaluateMVA( "BDTG method" ) , weight);
417 if (Use["RuleFit" ]) histRf ->Fill( reader->EvaluateMVA( "RuleFit method" ) , weight);
418 if (Use["SVM_Gauss" ]) histSVMG ->Fill( reader->EvaluateMVA( "SVM_Gauss method" ) , weight);
419 if (Use["SVM_Poly" ]) histSVMP ->Fill( reader->EvaluateMVA( "SVM_Poly method" ) , weight);
420 if (Use["SVM_Lin" ]) histSVML ->Fill( reader->EvaluateMVA( "SVM_Lin method" ) , weight);
421 if (Use["FDA_MT" ]) histFDAMT ->Fill( reader->EvaluateMVA( "FDA_MT method" ) , weight);
422 if (Use["FDA_GA" ]) histFDAGA ->Fill( reader->EvaluateMVA( "FDA_GA method" ) , weight);
423 if (Use["Category" ]) histCat ->Fill( reader->EvaluateMVA( "Category method" ) , weight);
424 if (Use["Plugin" ]) histPBdt ->Fill( reader->EvaluateMVA( "P_BDT method" ) , weight);
425
426 // Retrieve also per-event error
427 if (Use["PDEFoam"]) {
428 Double_t val = reader->EvaluateMVA( "PDEFoam method" );
429 Double_t err = reader->GetMVAError();
430 histPDEFoam ->Fill( val );
431 histPDEFoamErr->Fill( err );
432 if (err>1.e-50) histPDEFoamSig->Fill( val/err , weight);
433 }
434
435 // Retrieve probability instead of MVA output
436 if (Use["Fisher"]) {
437 probHistFi ->Fill( reader->GetProba ( "Fisher method" ) , weight);
438 rarityHistFi->Fill( reader->GetRarity( "Fisher method" ) , weight);
439 }
440 }
441
442 std::cout << npass << " events passing selection" << std::endl;
443
444 // Get elapsed time
445 sw.Stop();
446 std::cout << "--- End of event loop: "; sw.Print();
447
448 // Get efficiency for cuts classifier
449 if (Use["CutsGA"]) std::cout << "--- Efficiency for CutsGA method: " << double(nSelCutsGA)/theTree->GetEntries()
450 << " (for a required signal efficiency of " << effS << ")" << std::endl;
451
452 if (Use["CutsGA"]) {
453
454 // test: retrieve cuts for particular signal efficiency
455 // CINT ignores dynamic_casts so we have to use a cuts-secific Reader function to acces the pointer
456 TMVA::MethodCuts* mcuts = reader->FindCutsMVA( "CutsGA method" ) ;
457
458 if (mcuts) {
459 std::vector<Double_t> cutsMin;
460 std::vector<Double_t> cutsMax;
461 mcuts->GetCuts( 0.7, cutsMin, cutsMax );
462 std::cout << "--- -------------------------------------------------------------" << std::endl;
463 std::cout << "--- Retrieve cut values for signal efficiency of 0.7 from Reader" << std::endl;
464 for (UInt_t ivar=0; ivar<cutsMin.size(); ivar++) {
465 std::cout << "... Cut: "
466 << cutsMin[ivar]
467 << " < \""
468 << mcuts->GetInputVar(ivar)
469 << "\" <= "
470 << cutsMax[ivar] << std::endl;
471 }
472 std::cout << "--- -------------------------------------------------------------" << std::endl;
473 }
474 }
475
476 // --- Write histograms
477 cout << "dir " << dir << endl;
478 char* mydir = outdir;
479 TFile *target = new TFile( Form("%s/%s.root",mydir,samples.at(i) ) ,"RECREATE" );
480 cout << "Writing to file " << Form("%s/%s.root",mydir,samples.at(i) ) << endl;
481
482 if (Use["Likelihood" ]) histLk ->Write();
483 if (Use["LikelihoodD" ]) histLkD ->Write();
484 if (Use["LikelihoodPCA"]) histLkPCA ->Write();
485 if (Use["LikelihoodKDE"]) histLkKDE ->Write();
486 if (Use["LikelihoodMIX"]) histLkMIX ->Write();
487 if (Use["PDERS" ]) histPD ->Write();
488 if (Use["PDERSD" ]) histPDD ->Write();
489 if (Use["PDERSPCA" ]) histPDPCA ->Write();
490 if (Use["KNN" ]) histKNN ->Write();
491 if (Use["HMatrix" ]) histHm ->Write();
492 if (Use["Fisher" ]) histFi ->Write();
493 if (Use["FisherG" ]) histFiG ->Write();
494 if (Use["BoostedFisher"]) histFiB ->Write();
495 if (Use["LD" ]) histLD ->Write();
496 if (Use["MLP" ]) histNn ->Write();
497 if (Use["MLPBFGS" ]) histNnbfgs ->Write();
498 if (Use["MLPBNN" ]) histNnbnn ->Write();
499 if (Use["CFMlpANN" ]) histNnC ->Write();
500 if (Use["TMlpANN" ]) histNnT ->Write();
501 if (Use["BDT" ]) histBdt ->Write();
502 if (Use["BDTD" ]) histBdtD ->Write();
503 if (Use["BDTG" ]) histBdtG ->Write();
504 if (Use["RuleFit" ]) histRf ->Write();
505 if (Use["SVM_Gauss" ]) histSVMG ->Write();
506 if (Use["SVM_Poly" ]) histSVMP ->Write();
507 if (Use["SVM_Lin" ]) histSVML ->Write();
508 if (Use["FDA_MT" ]) histFDAMT ->Write();
509 if (Use["FDA_GA" ]) histFDAGA ->Write();
510 if (Use["Category" ]) histCat ->Write();
511 if (Use["Plugin" ]) histPBdt ->Write();
512
513 // Write also error and significance histos
514 if (Use["PDEFoam"]) { histPDEFoam->Write(); histPDEFoamErr->Write(); histPDEFoamSig->Write(); }
515
516 // Write also probability hists
517 if (Use["Fisher"]) { if (probHistFi != 0) probHistFi->Write(); if (rarityHistFi != 0) rarityHistFi->Write(); }
518 target->Close();
519
520 delete reader;
521
522 std::cout << "==> TMVAClassificationApplication is done with sample " << samples.at(i) << endl << std::endl;
523 }
524 }