ViewVC Help
View File | Revision Log | Show Annotations | Root Listing
root/cvsroot/UserCode/benhoob/HWW/Classify_HWW.C
Revision: 1.1
Committed: Thu Feb 10 13:40:34 2011 UTC (14 years, 3 months ago) by benhoob
Content type: text/plain
Branch: MAIN
Log Message:
Initial commit

File Contents

# User Rev Content
1 benhoob 1.1 /**********************************************************************************
2     * Project : TMVA - a Root-integrated toolkit for multivariate data analysis *
3     * Package : TMVA *
4     * Executable: TMVAClassificationApplication *
5     * *
6     * This macro provides a simple example on how to use the trained classifiers *
7     * within an analysis module *
8     **********************************************************************************/
9    
10     #include <cstdlib>
11     #include <vector>
12     #include <iostream>
13     #include <map>
14     #include <string>
15    
16     #include "TFile.h"
17     #include "TTree.h"
18     #include "TString.h"
19     #include "TSystem.h"
20     #include "TROOT.h"
21     #include "TStopwatch.h"
22     #include "TChain.h"
23    
24     #include "TMVAGui.C"
25    
26     #if not defined(__CINT__) || defined(__MAKECINT__)
27     #include "TMVA/Tools.h"
28     #include "TMVA/Reader.h"
29     #include "TMVA/MethodCuts.h"
30     #endif
31    
32     using namespace TMVA;
33    
34     void Classify_HWW( TString myMethodList = "" )
35     {
36     #ifdef __CINT__
37     gROOT->ProcessLine( ".O0" ); // turn off optimization in CINT
38     #endif
39    
40     //---------------------------------------------------------------
41    
42     // This loads the library
43     TMVA::Tools::Instance();
44    
45     // Default MVA methods to be trained + tested
46     std::map<std::string,int> Use;
47    
48     // --- Cut optimisation
49     Use["Cuts"] = 1;
50     Use["CutsD"] = 1;
51     Use["CutsPCA"] = 0;
52     Use["CutsGA"] = 0;
53     Use["CutsSA"] = 0;
54     //
55     // --- 1-dimensional likelihood ("naive Bayes estimator")
56     Use["Likelihood"] = 1;
57     Use["LikelihoodD"] = 0; // the "D" extension indicates decorrelated input variables (see option strings)
58     Use["LikelihoodPCA"] = 1; // the "PCA" extension indicates PCA-transformed input variables (see option strings)
59     Use["LikelihoodKDE"] = 0;
60     Use["LikelihoodMIX"] = 0;
61     //
62     // --- Mutidimensional likelihood and Nearest-Neighbour methods
63     Use["PDERS"] = 1;
64     Use["PDERSD"] = 0;
65     Use["PDERSPCA"] = 0;
66     Use["PDEFoam"] = 1;
67     Use["PDEFoamBoost"] = 0; // uses generalised MVA method boosting
68     Use["KNN"] = 1; // k-nearest neighbour method
69     //
70     // --- Linear Discriminant Analysis
71     Use["LD"] = 1; // Linear Discriminant identical to Fisher
72     Use["Fisher"] = 0;
73     Use["FisherG"] = 0;
74     Use["BoostedFisher"] = 0; // uses generalised MVA method boosting
75     Use["HMatrix"] = 0;
76     //
77     // --- Function Discriminant analysis
78     Use["FDA_GA"] = 1; // minimisation of user-defined function using Genetics Algorithm
79     Use["FDA_SA"] = 0;
80     Use["FDA_MC"] = 0;
81     Use["FDA_MT"] = 0;
82     Use["FDA_GAMT"] = 0;
83     Use["FDA_MCMT"] = 0;
84     //
85     // --- Neural Networks (all are feed-forward Multilayer Perceptrons)
86     Use["MLP"] = 0; // Recommended ANN
87     Use["MLPBFGS"] = 0; // Recommended ANN with optional training method
88     Use["MLPBNN"] = 1; // Recommended ANN with BFGS training method and bayesian regulator
89     Use["CFMlpANN"] = 0; // Depreciated ANN from ALEPH
90     Use["TMlpANN"] = 0; // ROOT's own ANN
91     //
92     // --- Support Vector Machine
93     Use["SVM"] = 1;
94     //
95     // --- Boosted Decision Trees
96     Use["BDT"] = 1; // uses Adaptive Boost
97     Use["BDTG"] = 0; // uses Gradient Boost
98     Use["BDTB"] = 0; // uses Bagging
99     Use["BDTD"] = 0; // decorrelation + Adaptive Boost
100     //
101     // --- Friedman's RuleFit method, ie, an optimised series of cuts ("rules")
102     Use["RuleFit"] = 1;
103     // ---------------------------------------------------------------
104     Use["Plugin"] = 0;
105     Use["Category"] = 0;
106     Use["SVM_Gauss"] = 0;
107     Use["SVM_Poly"] = 0;
108     Use["SVM_Lin"] = 0;
109    
110     std::cout << std::endl;
111     std::cout << "==> Start TMVAClassificationApplication" << std::endl;
112    
113     // Select methods (don't look at this code - not of interest)
114     if (myMethodList != "") {
115     for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;
116    
117     std::vector<TString> mlist = gTools().SplitString( myMethodList, ',' );
118     for (UInt_t i=0; i<mlist.size(); i++) {
119     std::string regMethod(mlist[i]);
120    
121     if (Use.find(regMethod) == Use.end()) {
122     std::cout << "Method \"" << regMethod
123     << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
124     for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
125     std::cout << it->first << " ";
126     }
127     std::cout << std::endl;
128     return;
129     }
130     Use[regMethod] = 1;
131     }
132     }
133    
134     // --------------------------------------------------------------------------------------------------
135    
136     //-----------------------------------
137     //select samples to run over
138     //-----------------------------------
139    
140     vector<char*> samples;
141     samples.push_back("WWTo2L2Nu");
142     // samples.push_back("GluGluToWWTo4L");
143     // samples.push_back("WZ");
144     // samples.push_back("ZZ");
145     // samples.push_back("TTJets");
146     // samples.push_back("tW");
147     // samples.push_back("WJetsToLNu");
148     // samples.push_back("DY");
149     // samples.push_back("Higgs130");
150     // samples.push_back("Higgs160");
151     // samples.push_back("Higgs200");
152    
153     const unsigned int nsamples = samples.size();
154    
155     for( unsigned int i = 0 ; i < nsamples ; ++i ){
156    
157     // --- Create the Reader object
158    
159     TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );
160    
161     // Create a set of variables and declare them to the reader
162     // - the variable names MUST corresponds in name and type to those given in the weight file(s) used
163     // Float_t var1, var2;
164     // Float_t var3, var4;
165     // reader->AddVariable( "myvar1 := var1+var2", &var1 );
166     // reader->AddVariable( "myvar2 := var1-var2", &var2 );
167     // reader->AddVariable( "var3", &var3 );
168     // reader->AddVariable( "var4", &var4 );
169    
170     Float_t lephard_pt;
171     Float_t lepsoft_pt;
172     Float_t dil_dphi;
173     Float_t dil_mass;
174    
175     reader->AddVariable( "lephard_pt" , &lephard_pt );
176     reader->AddVariable( "lepsoft_pt" , &lepsoft_pt );
177     reader->AddVariable( "dil_dphi" , &dil_dphi );
178     reader->AddVariable( "dil_mass" , &dil_mass );
179    
180     // Spectator variables declared in the training have to be added to the reader, too
181     // Float_t spec1,spec2;
182     // reader->AddSpectator( "spec1 := var1*2", &spec1 );
183     // reader->AddSpectator( "spec2 := var1*3", &spec2 );
184    
185     Float_t Category_cat1, Category_cat2, Category_cat3;
186     if (Use["Category"]){
187     // Add artificial spectators for distinguishing categories
188     // reader->AddSpectator( "Category_cat1 := var3<=0", &Category_cat1 );
189     // reader->AddSpectator( "Category_cat2 := (var3>0)&&(var4<0)", &Category_cat2 );
190     // reader->AddSpectator( "Category_cat3 := (var3>0)&&(var4>=0)", &Category_cat3 );
191     }
192    
193     // --- Book the MVA methods
194    
195     //TString dir = "weights/";
196     TString path = "Trainings/H130_WWTo2L2Nu/";
197     TString dir = path + "weights/";
198     TString outdir = path + "output/";
199    
200     TString prefix = "TMVAClassification";
201    
202     // Book method(s)
203     for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
204     if (it->second) {
205     TString methodName = TString(it->first) + TString(" method");
206     TString weightfile = dir + prefix + TString("_") + TString(it->first) + TString(".weights.xml");
207     reader->BookMVA( methodName, weightfile );
208     }
209     }
210    
211     // Book output histograms
212     UInt_t nbin = 100;
213     TH1F *histLk(0), *histLkD(0), *histLkPCA(0), *histLkKDE(0), *histLkMIX(0), *histPD(0), *histPDD(0);
214     TH1F *histPDPCA(0), *histPDEFoam(0), *histPDEFoamErr(0), *histPDEFoamSig(0), *histKNN(0), *histHm(0);
215     TH1F *histFi(0), *histFiG(0), *histFiB(0), *histLD(0), *histNn(0),*histNnbfgs(0),*histNnbnn(0);
216     TH1F *histNnC(0), *histNnT(0), *histBdt(0), *histBdtG(0), *histBdtD(0), *histRf(0), *histSVMG(0);
217     TH1F *histSVMP(0), *histSVML(0), *histFDAMT(0), *histFDAGA(0), *histCat(0), *histPBdt(0);
218    
219     if (Use["Likelihood"]) histLk = new TH1F( "MVA_Likelihood", "MVA_Likelihood", nbin, -1, 1 );
220     if (Use["LikelihoodD"]) histLkD = new TH1F( "MVA_LikelihoodD", "MVA_LikelihoodD", nbin, -1, 0.9999 );
221     if (Use["LikelihoodPCA"]) histLkPCA = new TH1F( "MVA_LikelihoodPCA", "MVA_LikelihoodPCA", nbin, -1, 1 );
222     if (Use["LikelihoodKDE"]) histLkKDE = new TH1F( "MVA_LikelihoodKDE", "MVA_LikelihoodKDE", nbin, -0.00001, 0.99999 );
223     if (Use["LikelihoodMIX"]) histLkMIX = new TH1F( "MVA_LikelihoodMIX", "MVA_LikelihoodMIX", nbin, 0, 1 );
224     if (Use["PDERS"]) histPD = new TH1F( "MVA_PDERS", "MVA_PDERS", nbin, 0, 1 );
225     if (Use["PDERSD"]) histPDD = new TH1F( "MVA_PDERSD", "MVA_PDERSD", nbin, 0, 1 );
226     if (Use["PDERSPCA"]) histPDPCA = new TH1F( "MVA_PDERSPCA", "MVA_PDERSPCA", nbin, 0, 1 );
227     if (Use["KNN"]) histKNN = new TH1F( "MVA_KNN", "MVA_KNN", nbin, 0, 1 );
228     if (Use["HMatrix"]) histHm = new TH1F( "MVA_HMatrix", "MVA_HMatrix", nbin, -0.95, 1.55 );
229     if (Use["Fisher"]) histFi = new TH1F( "MVA_Fisher", "MVA_Fisher", nbin, -4, 4 );
230     if (Use["FisherG"]) histFiG = new TH1F( "MVA_FisherG", "MVA_FisherG", nbin, -1, 1 );
231     if (Use["BoostedFisher"]) histFiB = new TH1F( "MVA_BoostedFisher", "MVA_BoostedFisher", nbin, -2, 2 );
232     if (Use["LD"]) histLD = new TH1F( "MVA_LD", "MVA_LD", nbin, -2, 2 );
233     if (Use["MLP"]) histNn = new TH1F( "MVA_MLP", "MVA_MLP", nbin, -1.25, 1.5 );
234     if (Use["MLPBFGS"]) histNnbfgs = new TH1F( "MVA_MLPBFGS", "MVA_MLPBFGS", nbin, -1.25, 1.5 );
235     if (Use["MLPBNN"]) histNnbnn = new TH1F( "MVA_MLPBNN", "MVA_MLPBNN", nbin, -1.25, 1.5 );
236     if (Use["CFMlpANN"]) histNnC = new TH1F( "MVA_CFMlpANN", "MVA_CFMlpANN", nbin, 0, 1 );
237     if (Use["TMlpANN"]) histNnT = new TH1F( "MVA_TMlpANN", "MVA_TMlpANN", nbin, -1.3, 1.3 );
238     if (Use["BDT"]) histBdt = new TH1F( "MVA_BDT", "MVA_BDT", nbin, -0.8, 0.8 );
239     if (Use["BDTD"]) histBdtD = new TH1F( "MVA_BDTD", "MVA_BDTD", nbin, -0.8, 0.8 );
240     if (Use["BDTG"]) histBdtG = new TH1F( "MVA_BDTG", "MVA_BDTG", nbin, -1.0, 1.0 );
241     if (Use["RuleFit"]) histRf = new TH1F( "MVA_RuleFit", "MVA_RuleFit", nbin, -2.0, 2.0 );
242     if (Use["SVM_Gauss"]) histSVMG = new TH1F( "MVA_SVM_Gauss", "MVA_SVM_Gauss", nbin, 0.0, 1.0 );
243     if (Use["SVM_Poly"]) histSVMP = new TH1F( "MVA_SVM_Poly", "MVA_SVM_Poly", nbin, 0.0, 1.0 );
244     if (Use["SVM_Lin"]) histSVML = new TH1F( "MVA_SVM_Lin", "MVA_SVM_Lin", nbin, 0.0, 1.0 );
245     if (Use["FDA_MT"]) histFDAMT = new TH1F( "MVA_FDA_MT", "MVA_FDA_MT", nbin, -2.0, 3.0 );
246     if (Use["FDA_GA"]) histFDAGA = new TH1F( "MVA_FDA_GA", "MVA_FDA_GA", nbin, -2.0, 3.0 );
247     if (Use["Category"]) histCat = new TH1F( "MVA_Category", "MVA_Category", nbin, -2., 2. );
248     if (Use["Plugin"]) histPBdt = new TH1F( "MVA_PBDT", "MVA_BDT", nbin, -0.8, 0.8 );
249    
250     // PDEFoam also returns per-event error, fill in histogram, and also fill significance
251     if (Use["PDEFoam"]) {
252     histPDEFoam = new TH1F( "MVA_PDEFoam", "MVA_PDEFoam", nbin, 0, 1 );
253     histPDEFoamErr = new TH1F( "MVA_PDEFoamErr", "MVA_PDEFoam error", nbin, 0, 1 );
254     histPDEFoamSig = new TH1F( "MVA_PDEFoamSig", "MVA_PDEFoam significance", nbin, 0, 10 );
255     }
256    
257     // Book example histogram for probability (the other methods are done similarly)
258     TH1F *probHistFi(0), *rarityHistFi(0);
259     if (Use["Fisher"]) {
260     probHistFi = new TH1F( "MVA_Fisher_Proba", "MVA_Fisher_Proba", nbin, 0, 1 );
261     rarityHistFi = new TH1F( "MVA_Fisher_Rarity", "MVA_Fisher_Rarity", nbin, 0, 1 );
262     }
263    
264     // Prepare input tree (this must be replaced by your data source)
265     // in this example, there is a toy tree with signal and one with background events
266     // we'll later on use only the "signal" events for the test in this example.
267     //
268     // TFile *input(0);
269     // TString fname = "./tmva_example.root";
270     // if (!gSystem->AccessPathName( fname ))
271     // input = TFile::Open( fname ); // check if file in local directory exists
272     // else
273     // input = TFile::Open( "http://root.cern.ch/files/tmva_class_example.root" ); // if not: download from ROOT server
274    
275     // if (!input) {
276     // std::cout << "ERROR: could not open data file" << std::endl;
277     // exit(1);
278     // }
279     // std::cout << "--- TMVAClassificationApp : Using input file: " << input->GetName() << std::endl;
280    
281    
282     char* iter = "v2";
283     TChain *ch = new TChain("Events");
284    
285     if( strcmp( samples.at(i) , "DY" ) == 0 ){
286     ch -> Add( Form("babies/%s/DYToMuMuM20_PU_testFinal_baby.root",iter) );
287     ch -> Add( Form("babies/%s/DYToMuMuM10To20_PU_testFinal_baby.root",iter) );
288     ch -> Add( Form("babies/%s/DYToEEM20_PU_testFinal_baby.root",iter) );
289     ch -> Add( Form("babies/%s/DYToEEM10To20_PU_testFinal_baby.root",iter) );
290     ch -> Add( Form("babies/%s/DYToTauTauM20_PU_testFinal_baby.root",iter) );
291     ch -> Add( Form("babies/%s/DYToTauTauM10To20_PU_testFinal_baby.root",iter) );
292     }
293     else if( strcmp( samples.at(i) , "Higgs130" ) == 0 ){
294     ch -> Add( Form("babies/%s/HToWWTo2L2NuM130_PU_testFinal_baby.root",iter) );
295     ch -> Add( Form("babies/%s/HToWWToLNuTauNuM130_PU_testFinal_baby.root",iter) );
296     ch -> Add( Form("babies/%s/HToWWTo2Tau2NuM130_PU_testFinal_baby.root",iter) );
297     }
298     else if( strcmp( samples.at(i) , "Higgs160" ) == 0 ){
299     ch -> Add( Form("babies/%s/HToWWTo2L2NuM160_PU_testFinal_baby.root",iter) );
300     ch -> Add( Form("babies/%s/HToWWToLNuTauNuM160_PU_testFinal_baby.root",iter) );
301     ch -> Add( Form("babies/%s/HToWWTo2Tau2NuM160_PU_testFinal_baby.root",iter) );
302     }
303     else if( strcmp( samples.at(i) , "Higgs200" ) == 0 ){
304     ch -> Add( Form("babies/%s/HToWWTo2L2NuM200_PU_testFinal_baby.root",iter) );
305     ch -> Add( Form("babies/%s/HToWWToLNuTauNuM200_PU_testFinal_baby.root",iter) );
306     ch -> Add( Form("babies/%s/HToWWTo2Tau2NuM200_PU_testFinal_baby.root",iter) );
307     }
308     else{
309     ch -> Add( Form("babies/%s/%s_PU_testFinal_baby.root",iter,samples.at(i)) );
310     }
311    
312    
313     // TChain *chbackground = new TChain("Events");
314     // if( sample == "WW" ) chbackground->Add(Form("babies/%s/WWTo2L2Nu_PU_testFinal_baby.root",iter));
315     // chbackground->Add(Form("babies/%s/GluGluToWWTo4L_PU_testFinal_baby.root",iter));
316     // chbackground->Add(Form("babies/%s/WZ_PU_testFinal_baby.root",iter));
317     // chbackground->Add(Form("babies/%s/ZZ_PU_testFinal_baby.root",iter));
318     // chbackground->Add(Form("babies/%s/TTJets_PU_testFinal_baby.root",iter));
319     // chbackground->Add(Form("babies/%s/tW_PU_testFinal_baby.root",iter));
320     // chbackground->Add(Form("babies/%s/WJetsToLNu_PU_testFinal_baby.root",iter));
321     // chbackground->Add(Form("babies/%s/DYToMuMuM20_PU_testFinal_baby.root",iter) );
322     // chbackground->Add(Form("babies/%s/DYToMuMuM10To20_PU_testFinal_baby.root",iter) );
323     // chbackground->Add(Form("babies/%s/DYToEEM20_PU_testFinal_baby.root",iter) );
324     // chbackground->Add(Form("babies/%s/DYToEEM10To20_PU_testFinal_baby.root",iter) );
325     // chbackground->Add(Form("babies/%s/DYToTauTauM20_PU_testFinal_baby.root",iter) );
326     // chbackground->Add(Form("babies/%s/DYToTauTauM10To20_PU_testFinal_baby.root",iter) );
327    
328     // int mH = 130;
329     // //int mH = 160;
330     // //int mH = 200;
331    
332     // TChain *chsignal = new TChain("Events");
333    
334     // if( mH == 130 ){
335     // chsignal->Add(Form("babies/%s/HToWWTo2L2NuM130_PU_testFinal_baby.root",iter));
336     // chsignal->Add(Form("babies/%s/HToWWToLNuTauNuM130_PU_testFinal_baby.root",iter));
337     // chsignal->Add(Form("babies/%s/HToWWTo2Tau2NuM130_PU_testFinal_baby.root",iter));
338     // }
339     // else if( mH == 160 ){
340     // chsignal->Add(Form("babies/%s/HToWWTo2L2NuM160_PU_testFinal_baby.root",iter));
341     // chsignal->Add(Form("babies/%s/HToWWToLNuTauNuM160_PU_testFinal_baby.root",iter));
342     // chsignal->Add(Form("babies/%s/HToWWTo2Tau2NuM160_PU_testFinal_baby.root",iter));
343     // }
344     // else if( mH == 200 ){
345     // chsignal->Add(Form("babies/%s/HToWWTo2L2NuM200_PU_testFinal_baby.root",iter));
346     // chsignal->Add(Form("babies/%s/HToWWToLNuTauNuM200_PU_testFinal_baby.root",iter));
347     // chsignal->Add(Form("babies/%s/HToWWTo2Tau2NuM200_PU_testFinal_baby.root",iter));
348     // }
349     // else{
350     // std::cout << "Error, unrecognized higgs mass " << mH << " GeV, quitting" << std::endl;
351     // exit(0);
352     //}
353    
354    
355    
356    
357     // --- Event loop
358    
359     // Prepare the event tree
360     // - here the variable names have to corresponds to your tree
361     // - you can use the same variables as above which is slightly faster,
362     // but of course you can use different ones and copy the values inside the event loop
363     //
364    
365    
366    
367    
368     // TTree* theTree = (TTree*)input->Get("TreeS");
369     // Float_t userVar1, userVar2;
370     // theTree->SetBranchAddress( "var1", &userVar1 );
371     // theTree->SetBranchAddress( "var2", &userVar2 );
372     // theTree->SetBranchAddress( "var3", &var3 );
373     // theTree->SetBranchAddress( "var4", &var4 );
374    
375     TTree *theTree = (TTree*) ch;
376    
377     std::cout << "--- Using input files: -------------------" << std::endl;
378    
379     TObjArray *listOfFiles = ch->GetListOfFiles();
380     TIter fileIter(listOfFiles);
381     TChainElement* currentFile = 0;
382    
383     while((currentFile = (TChainElement*)fileIter.Next())) {
384     std::cout << currentFile->GetTitle() << std::endl;
385     }
386    
387     //TTree *theTree = (TTree*) chsignal;
388     //TTree *theTree = (TTree*) chbackground;
389     Float_t lephard_pt_;
390     Float_t lepsoft_pt_;
391     Float_t dil_dphi_;
392     Float_t dil_mass_;
393     Int_t event_type_;
394     Float_t met_projpt_;
395     Int_t jets_num_;
396     Int_t extralep_num_;
397     Int_t lowptbtags_num_;
398     Int_t softmu_num_;
399     Float_t event_scale1fb_;
400    
401     theTree->SetBranchAddress( "lephard_pt_" , &lephard_pt_ );
402     theTree->SetBranchAddress( "lepsoft_pt_" , &lepsoft_pt_ );
403     theTree->SetBranchAddress( "dil_dphi_" , &dil_dphi_ );
404     theTree->SetBranchAddress( "dil_mass_" , &dil_mass_ );
405     theTree->SetBranchAddress( "event_type_" , &event_type_ );
406     theTree->SetBranchAddress( "met_projpt_" , &met_projpt_ );
407     theTree->SetBranchAddress( "jets_num_" , &jets_num_ );
408     theTree->SetBranchAddress( "extralep_num_" , &extralep_num_ );
409     theTree->SetBranchAddress( "lowptbtags_num_" , &lowptbtags_num_ );
410     theTree->SetBranchAddress( "softmu_num_" , &softmu_num_ );
411     theTree->SetBranchAddress( "event_scale1fb_" , &event_scale1fb_ );
412    
413     // Efficiency calculator for cut method
414     Int_t nSelCutsGA = 0;
415     Double_t effS = 0.7;
416    
417     std::vector<Float_t> vecVar(4); // vector for EvaluateMVA tests
418    
419     std::cout << "--- Processing: " << theTree->GetEntries() << " events" << std::endl;
420     TStopwatch sw;
421     sw.Start();
422    
423     int npass = 0;
424    
425     for (Long64_t ievt=0; ievt<theTree->GetEntries();ievt++) {
426    
427     if (ievt%1000 == 0) std::cout << "--- ... Processing event: " << ievt << std::endl;
428    
429     theTree->GetEntry(ievt);
430    
431     if( event_type_ == 2 && met_projpt_ < 20. ) continue;
432     if( event_type_ != 2 && met_projpt_ < 35. ) continue;
433     if( lephard_pt_ < 20. ) continue;
434     if( lepsoft_pt_ < 10. ) continue;
435     if( jets_num_ > 0 ) continue;
436     if( extralep_num_ > 0 ) continue;
437     if( lowptbtags_num_ > 0 ) continue;
438     if( softmu_num_ > 0 ) continue;
439     if( dil_mass_ < 12. ) continue;
440    
441     float weight = event_scale1fb_ * 0.0355;
442    
443     lephard_pt = lephard_pt_;
444     lepsoft_pt = lepsoft_pt_;
445     dil_mass = dil_mass_;
446     dil_dphi = dil_dphi_;
447    
448     npass++;
449     // var1 = userVar1 + userVar2;
450     // var2 = userVar1 - userVar2;
451    
452     // --- Return the MVA outputs and fill into histograms
453    
454     if (Use["CutsGA"]) {
455     // Cuts is a special case: give the desired signal efficienciy
456     Bool_t passed = reader->EvaluateMVA( "CutsGA method", effS );
457     if (passed) nSelCutsGA++;
458     }
459    
460     if (Use["Likelihood" ]) histLk ->Fill( reader->EvaluateMVA( "Likelihood method" ) , weight);
461     if (Use["LikelihoodD" ]) histLkD ->Fill( reader->EvaluateMVA( "LikelihoodD method" ) , weight);
462     if (Use["LikelihoodPCA"]) histLkPCA ->Fill( reader->EvaluateMVA( "LikelihoodPCA method" ) , weight);
463     if (Use["LikelihoodKDE"]) histLkKDE ->Fill( reader->EvaluateMVA( "LikelihoodKDE method" ) , weight);
464     if (Use["LikelihoodMIX"]) histLkMIX ->Fill( reader->EvaluateMVA( "LikelihoodMIX method" ) , weight);
465     if (Use["PDERS" ]) histPD ->Fill( reader->EvaluateMVA( "PDERS method" ) , weight);
466     if (Use["PDERSD" ]) histPDD ->Fill( reader->EvaluateMVA( "PDERSD method" ) , weight);
467     if (Use["PDERSPCA" ]) histPDPCA ->Fill( reader->EvaluateMVA( "PDERSPCA method" ) , weight);
468     if (Use["KNN" ]) histKNN ->Fill( reader->EvaluateMVA( "KNN method" ) , weight);
469     if (Use["HMatrix" ]) histHm ->Fill( reader->EvaluateMVA( "HMatrix method" ) , weight);
470     if (Use["Fisher" ]) histFi ->Fill( reader->EvaluateMVA( "Fisher method" ) , weight);
471     if (Use["FisherG" ]) histFiG ->Fill( reader->EvaluateMVA( "FisherG method" ) , weight);
472     if (Use["BoostedFisher"]) histFiB ->Fill( reader->EvaluateMVA( "BoostedFisher method" ) , weight);
473     if (Use["LD" ]) histLD ->Fill( reader->EvaluateMVA( "LD method" ) , weight);
474     if (Use["MLP" ]) histNn ->Fill( reader->EvaluateMVA( "MLP method" ) , weight);
475     if (Use["MLPBFGS" ]) histNnbfgs ->Fill( reader->EvaluateMVA( "MLPBFGS method" ) , weight);
476     if (Use["MLPBNN" ]) histNnbnn ->Fill( reader->EvaluateMVA( "MLPBNN method" ) , weight);
477     if (Use["CFMlpANN" ]) histNnC ->Fill( reader->EvaluateMVA( "CFMlpANN method" ) , weight);
478     if (Use["TMlpANN" ]) histNnT ->Fill( reader->EvaluateMVA( "TMlpANN method" ) , weight);
479     if (Use["BDT" ]) histBdt ->Fill( reader->EvaluateMVA( "BDT method" ) , weight);
480     if (Use["BDTD" ]) histBdtD ->Fill( reader->EvaluateMVA( "BDTD method" ) , weight);
481     if (Use["BDTG" ]) histBdtG ->Fill( reader->EvaluateMVA( "BDTG method" ) , weight);
482     if (Use["RuleFit" ]) histRf ->Fill( reader->EvaluateMVA( "RuleFit method" ) , weight);
483     if (Use["SVM_Gauss" ]) histSVMG ->Fill( reader->EvaluateMVA( "SVM_Gauss method" ) , weight);
484     if (Use["SVM_Poly" ]) histSVMP ->Fill( reader->EvaluateMVA( "SVM_Poly method" ) , weight);
485     if (Use["SVM_Lin" ]) histSVML ->Fill( reader->EvaluateMVA( "SVM_Lin method" ) , weight);
486     if (Use["FDA_MT" ]) histFDAMT ->Fill( reader->EvaluateMVA( "FDA_MT method" ) , weight);
487     if (Use["FDA_GA" ]) histFDAGA ->Fill( reader->EvaluateMVA( "FDA_GA method" ) , weight);
488     if (Use["Category" ]) histCat ->Fill( reader->EvaluateMVA( "Category method" ) , weight);
489     if (Use["Plugin" ]) histPBdt ->Fill( reader->EvaluateMVA( "P_BDT method" ) , weight);
490    
491     // Retrieve also per-event error
492     if (Use["PDEFoam"]) {
493     Double_t val = reader->EvaluateMVA( "PDEFoam method" );
494     Double_t err = reader->GetMVAError();
495     histPDEFoam ->Fill( val );
496     histPDEFoamErr->Fill( err );
497     if (err>1.e-50) histPDEFoamSig->Fill( val/err , weight);
498     }
499    
500     // Retrieve probability instead of MVA output
501     if (Use["Fisher"]) {
502     probHistFi ->Fill( reader->GetProba ( "Fisher method" ) , weight);
503     rarityHistFi->Fill( reader->GetRarity( "Fisher method" ) , weight);
504     }
505     }
506    
507     std::cout << npass << " events passing selection" << std::endl;
508    
509     // Get elapsed time
510     sw.Stop();
511     std::cout << "--- End of event loop: "; sw.Print();
512    
513     // Get efficiency for cuts classifier
514     if (Use["CutsGA"]) std::cout << "--- Efficiency for CutsGA method: " << double(nSelCutsGA)/theTree->GetEntries()
515     << " (for a required signal efficiency of " << effS << ")" << std::endl;
516    
517     if (Use["CutsGA"]) {
518    
519     // test: retrieve cuts for particular signal efficiency
520     // CINT ignores dynamic_casts so we have to use a cuts-secific Reader function to acces the pointer
521     TMVA::MethodCuts* mcuts = reader->FindCutsMVA( "CutsGA method" ) ;
522    
523     if (mcuts) {
524     std::vector<Double_t> cutsMin;
525     std::vector<Double_t> cutsMax;
526     mcuts->GetCuts( 0.7, cutsMin, cutsMax );
527     std::cout << "--- -------------------------------------------------------------" << std::endl;
528     std::cout << "--- Retrieve cut values for signal efficiency of 0.7 from Reader" << std::endl;
529     for (UInt_t ivar=0; ivar<cutsMin.size(); ivar++) {
530     std::cout << "... Cut: "
531     << cutsMin[ivar]
532     << " < \""
533     << mcuts->GetInputVar(ivar)
534     << "\" <= "
535     << cutsMax[ivar] << std::endl;
536     }
537     std::cout << "--- -------------------------------------------------------------" << std::endl;
538     }
539     }
540    
541     // --- Write histograms
542     cout << "dir " << dir << endl;
543     char* mydir = outdir;
544     TFile *target = new TFile( Form("%s/%s.root",mydir,samples.at(i) ) ,"RECREATE" );
545     cout << "Writing to file " << Form("%s/%s.root",mydir,samples.at(i) ) << endl;
546    
547     if (Use["Likelihood" ]) histLk ->Write();
548     if (Use["LikelihoodD" ]) histLkD ->Write();
549     if (Use["LikelihoodPCA"]) histLkPCA ->Write();
550     if (Use["LikelihoodKDE"]) histLkKDE ->Write();
551     if (Use["LikelihoodMIX"]) histLkMIX ->Write();
552     if (Use["PDERS" ]) histPD ->Write();
553     if (Use["PDERSD" ]) histPDD ->Write();
554     if (Use["PDERSPCA" ]) histPDPCA ->Write();
555     if (Use["KNN" ]) histKNN ->Write();
556     if (Use["HMatrix" ]) histHm ->Write();
557     if (Use["Fisher" ]) histFi ->Write();
558     if (Use["FisherG" ]) histFiG ->Write();
559     if (Use["BoostedFisher"]) histFiB ->Write();
560     if (Use["LD" ]) histLD ->Write();
561     if (Use["MLP" ]) histNn ->Write();
562     if (Use["MLPBFGS" ]) histNnbfgs ->Write();
563     if (Use["MLPBNN" ]) histNnbnn ->Write();
564     if (Use["CFMlpANN" ]) histNnC ->Write();
565     if (Use["TMlpANN" ]) histNnT ->Write();
566     if (Use["BDT" ]) histBdt ->Write();
567     if (Use["BDTD" ]) histBdtD ->Write();
568     if (Use["BDTG" ]) histBdtG ->Write();
569     if (Use["RuleFit" ]) histRf ->Write();
570     if (Use["SVM_Gauss" ]) histSVMG ->Write();
571     if (Use["SVM_Poly" ]) histSVMP ->Write();
572     if (Use["SVM_Lin" ]) histSVML ->Write();
573     if (Use["FDA_MT" ]) histFDAMT ->Write();
574     if (Use["FDA_GA" ]) histFDAGA ->Write();
575     if (Use["Category" ]) histCat ->Write();
576     if (Use["Plugin" ]) histPBdt ->Write();
577    
578     // Write also error and significance histos
579     if (Use["PDEFoam"]) { histPDEFoam->Write(); histPDEFoamErr->Write(); histPDEFoamSig->Write(); }
580    
581     // Write also probability hists
582     if (Use["Fisher"]) { if (probHistFi != 0) probHistFi->Write(); if (rarityHistFi != 0) rarityHistFi->Write(); }
583     target->Close();
584    
585     delete reader;
586    
587     std::cout << "==> TMVAClassificationApplication is done with sample " << samples.at(i) << endl << std::endl;
588     }
589     }