root/cvsroot/UserCode/benhoob/HWW/TMVARegressionApplication.C
Revision: 1.1
Committed: Mon Feb 14 12:39:14 2011 UTC (14 years, 3 months ago) by benhoob
Content type: text/plain
Branch: MAIN
CVS Tags: HEAD
Log Message:
Initial commit

File Contents

/**********************************************************************************
 * Project    : TMVA - a ROOT-integrated toolkit for multivariate data analysis
 * Package    : TMVA
 * Executable : TMVARegressionApplication
 *
 * This macro provides a simple example of how to use the trained regression MVAs
 * within an analysis module.
 **********************************************************************************/

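// Usage sketch (assumes that the weight files produced by the corresponding
// TMVARegression training macro are available under ./weights/):
//
//    root -l -q 'TMVARegressionApplication.C("BDTG,MLP")'
//
// The optional argument is a comma-separated list of the methods to evaluate;
// if it is empty, the default selection in the "Use" map below is applied.
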
#include <cstdlib>
#include <vector>
#include <iostream>
#include <map>
#include <string>

#include "TFile.h"
#include "TTree.h"
#include "TString.h"
#include "TSystem.h"
#include "TROOT.h"
#include "TStopwatch.h"
#include "TH1F.h"   // needed for the TH1F output histograms booked below

#if not defined(__CINT__) || defined(__MAKECINT__)
#include "TMVA/Tools.h"
#include "TMVA/Reader.h"
#endif

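// Note: the TMVA headers are kept inside the preprocessor guard above so that
// the macro can also be run in the plain interpreter, where the TMVA classes
// are provided through the ROOT dictionaries; when the macro is compiled
// (e.g. with ACLiC), the headers are included explicitly.
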
using namespace TMVA;

void TMVARegressionApplication( TString myMethodList = "" )
{
   //---------------------------------------------------------------
   // This loads the library
   TMVA::Tools::Instance();

   // Default MVA methods to be trained + tested
   std::map<std::string,int> Use;

   // --- Multidimensional likelihood and Nearest-Neighbour methods
   Use["PDERS"]    = 0;
   Use["PDEFoam"]  = 1;
   Use["KNN"]      = 1;
   //
   // --- Linear Discriminant Analysis
   Use["LD"]       = 1;
   //
   // --- Function Discriminant Analysis
   Use["FDA_GA"]   = 1;
   Use["FDA_MC"]   = 0;
   Use["FDA_MT"]   = 0;
   Use["FDA_GAMT"] = 0;
   //
   // --- Neural Network
   Use["MLP"]      = 1;
   //
   // --- Support Vector Machine
   Use["SVM"]      = 0;
   //
   // --- Boosted Decision Trees
   Use["BDT"]      = 0;
   Use["BDTG"]     = 1;
   // ---------------------------------------------------------------

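   // Only methods whose weight files from the training step are present in the
   // weights/ directory (see the booking loop below) can actually be evaluated;
   // enabling a method here without its weight file leads to an error when
   // BookMVA tries to open it.
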
   std::cout << std::endl;
   std::cout << "==> Start TMVARegressionApplication" << std::endl;

   // Select methods (don't look at this code - not of interest)
   if (myMethodList != "") {
      for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) it->second = 0;

      std::vector<TString> mlist = gTools().SplitString( myMethodList, ',' );
      for (UInt_t i=0; i<mlist.size(); i++) {
         std::string regMethod(mlist[i]);

         if (Use.find(regMethod) == Use.end()) {
            std::cout << "Method \"" << regMethod << "\" not known in TMVA under this name. Choose among the following:" << std::endl;
            for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) std::cout << it->first << " ";
            std::cout << std::endl;
            return;
         }
         Use[regMethod] = 1;
      }
   }

   // --------------------------------------------------------------------------------------------------

   // --- Create the Reader object

   TMVA::Reader *reader = new TMVA::Reader( "!Color:!Silent" );

   // Create a set of variables and declare them to the reader
   // - the variable names MUST correspond in name and type to those given in the weight file(s) used
   Float_t var1, var2;
   reader->AddVariable( "var1", &var1 );
   reader->AddVariable( "var2", &var2 );

   // Spectator variables declared in the training have to be added to the reader, too
   Float_t spec1, spec2;
   reader->AddSpectator( "spec1:=var1*2", &spec1 );
   reader->AddSpectator( "spec2:=var1*3", &spec2 );

   // --- Book the MVA methods

   TString dir    = "weights/";
   TString prefix = "TMVARegression";

   // Book method(s)
   for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
      if (it->second) {
         TString methodName = it->first + " method";
         TString weightfile = dir + prefix + "_" + TString(it->first) + ".weights.xml";
         reader->BookMVA( methodName, weightfile );
      }
   }
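
   // For illustration: with the settings above, booking the "BDTG" method reads
   // the weight file weights/TMVARegression_BDTG.weights.xml and registers it
   // under the tag "BDTG method", which is the tag used for evaluation below.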

   // Book output histograms
   TH1* hists[100];
   Int_t nhists = -1;
   for (std::map<std::string,int>::iterator it = Use.begin(); it != Use.end(); it++) {
      TH1* h = new TH1F( it->first.c_str(), TString(it->first) + " method", 100, -100, 600 );
      if (it->second) hists[++nhists] = h;
   }
   nhists++;

   // Prepare input tree (this must be replaced by your data source)
   // In this example there is a toy tree with signal events and one with background
   // events; later on we will use only the "signal" events for the test.
   //
   TFile *input(0);
   TString fname = "./tmva_reg_example.root";
   if (!gSystem->AccessPathName( fname )) {
      input = TFile::Open( fname ); // check if file in local directory exists
   }
   else {
      input = TFile::Open( "http://root.cern.ch/files/tmva_reg_example.root" ); // if not: download from ROOT server
   }

   if (!input) {
      std::cout << "ERROR: could not open data file" << std::endl;
      exit(1);
   }
   std::cout << "--- TMVARegressionApp : Using input file: " << input->GetName() << std::endl;

   // --- Event loop

   // Prepare the tree
   // - here the variable names have to correspond to your tree
   // - you can use the same variables as above, which is slightly faster,
   //   but of course you can use different ones and copy the values inside
   //   the event loop (see the comment sketch below)
   //
   TTree* theTree = (TTree*)input->Get("TreeR");
   std::cout << "--- Select signal sample" << std::endl;
   theTree->SetBranchAddress( "var1", &var1 );
   theTree->SetBranchAddress( "var2", &var2 );
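
   // Hypothetical sketch: if the input tree stored the inputs under different
   // branch names, one would read them into separate buffers and copy them
   // into var1/var2 inside the event loop, e.g.
   //
   //    Float_t treeVar1, treeVar2;                        // names are assumptions
   //    theTree->SetBranchAddress( "myVar1", &treeVar1 );
   //    theTree->SetBranchAddress( "myVar2", &treeVar2 );
   //    ...
   //    var1 = treeVar1;  var2 = treeVar2;                 // inside the loop, before evaluating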

   std::cout << "--- Processing: " << theTree->GetEntries() << " events" << std::endl;
   TStopwatch sw;
   sw.Start();
   for (Long64_t ievt=0; ievt<theTree->GetEntries(); ievt++) {

      if (ievt%1000 == 0) {
         std::cout << "--- ... Processing event: " << ievt << std::endl;
      }

      theTree->GetEntry(ievt);

      // Retrieve the MVA target values (regression outputs) and fill into histograms
      // NOTE: EvaluateRegression(..) returns a vector for multi-target regression
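      //       only the first element [0] is used here; for a training with
      //       several regression targets the further components would be
      //       available as (reader->EvaluateRegression( title ))[1], [2], ...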

      for (Int_t ih=0; ih<nhists; ih++) {
         TString title = hists[ih]->GetTitle();
         Float_t val = (reader->EvaluateRegression( title ))[0];
         hists[ih]->Fill( val );
      }
   }
   sw.Stop();
   std::cout << "--- End of event loop: "; sw.Print();

   // --- Write histograms

   TFile *target = new TFile( "TMVARegApp.root","RECREATE" );
   for (Int_t ih=0; ih<nhists; ih++) hists[ih]->Write();
   target->Close();

   std::cout << "--- Created root file: \"" << target->GetName()
             << "\" containing the MVA output histograms" << std::endl;
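
   // The output histograms can be inspected interactively afterwards, e.g. with
   //    root -l TMVARegApp.root
   // and then browsing the file (for instance with a TBrowser).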

   delete reader;

   std::cout << "==> TMVARegressionApplication is done!" << std::endl << std::endl;
}