root/cvsroot/UserCode/LJMet/MultivariateAnalysis/macros/TMVAnalysis.C
Revision: 1.2
Committed: Fri Apr 10 19:54:00 2009 UTC (16 years, 1 month ago) by kukartse
Content type: text/plain
Branch: MAIN
CVS Tags: V00-01-15, V00-01-14, V00-01-13, V00-01-12
Changes since 1.1: +22 -17 lines
Log Message:
*** empty log message ***

File Contents

// @(#)root/tmva $Id: TMVAnalysis.C,v 1.1 2009/01/21 18:36:08 kukartse Exp $
/**********************************************************************************
 * Project    : TMVA - a Root-integrated toolkit for multivariate data analysis
 * Package    : TMVA
 * Root Macro : TMVAnalysis
 *
 * This macro provides examples for the training and testing of all the
 * TMVA classifiers.
 *
 * The input data is a toy-MC sample consisting of four Gaussian-distributed
 * and linearly correlated input variables.
 *
 * The methods to be used can be switched on and off by means of booleans, or
 * via the prompt command, for example:
 *
 *    root -l TMVAnalysis.C\(\"Fisher,Likelihood\"\)
 *
 * (note that the backslashes are mandatory)
 *
 * The output file "TMVA.root" can be analysed with the use of dedicated
 * macros (simply say: root -l <macro.C>), which can be conveniently
 * invoked through a GUI that will appear at the end of the run of this macro.
 **********************************************************************************/

#include <iostream>

#include "TCut.h"
#include "TFile.h"
#include "TROOT.h"   // for gROOT
#include "TSystem.h"
#include "TTree.h"
// requires links
#include "TMVA/Factory.h"
#include "TMVA/Tools.h"
#include "TMVA/Config.h"

#include "TMVAGui.C"

// ---------------------------------------------------------------
// choose MVA methods to be trained + tested
Bool_t Use_Cuts          = 0;
Bool_t Use_CutsD         = 0;
Bool_t Use_CutsGA        = 1;
// ---
Bool_t Use_Likelihood    = 1;
Bool_t Use_LikelihoodD   = 0; // the "D" extension indicates decorrelated input variables (see option strings)
Bool_t Use_LikelihoodPCA = 1; // the "PCA" extension indicates PCA-transformed input variables (see option strings)
Bool_t Use_LikelihoodKDE = 0;
Bool_t Use_LikelihoodMIX = 0;
// ---
Bool_t Use_PDERS         = 1;
Bool_t Use_PDERSD        = 0;
Bool_t Use_PDERSPCA      = 0;
Bool_t Use_KNN           = 1;
// ---
Bool_t Use_HMatrix       = 1;
Bool_t Use_Fisher        = 1;
// ---
Bool_t Use_FDA_GA        = 0;
Bool_t Use_FDA_MC        = 0;
Bool_t Use_FDA_SA        = 0;
Bool_t Use_FDA_MT        = 1;
Bool_t Use_FDA_GAMT      = 0;
Bool_t Use_FDA_MCMT      = 0;
// ---
Bool_t Use_MLP           = 1; // this is the recommended ANN
Bool_t Use_CFMlpANN      = 0;
Bool_t Use_TMlpANN       = 0;
// ---
Bool_t Use_BDT           = 1;
Bool_t Use_BDTD          = 0;
// ---
Bool_t Use_RuleFitTMVA   = 1;
Bool_t Use_RuleFitJF     = 0;
// ---
Bool_t Use_SVM_Gauss     = 1;
Bool_t Use_SVM_Poly      = 0;
Bool_t Use_SVM_Lin       = 0;
// ---------------------------------------------------------------

// read input data file with ascii format (otherwise ROOT) ?
Bool_t ReadDataFromAsciiIFormat = kFALSE;

void TMVAnalysis( TString myMethodList = "" )
{
   // explicit loading of the shared libTMVA is done in TMVAlogon.C, defined in .rootrc
   // if you use your private .rootrc, or run from a different directory, please copy the
   // corresponding lines from .rootrc
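   //
   // if TMVAlogon.C is not executed in your setup, the shared library can usually be
   // loaded by hand instead (assumption -- the exact library name depends on the
   // ROOT/TMVA installation), e.g.:
   //    gSystem->Load( "libTMVA" );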

   // methods to be processed can be given as an argument; use format:
   //
   // mylinux~> root -l TMVAnalysis.C\(\"myMethod1,myMethod2,myMethod3\"\)
   //
   TList* mlist = TMVA::Tools::ParseFormatLine( myMethodList, " :," );

   if (mlist->GetSize()>0) {
      Use_CutsGA = Use_CutsD = Use_Cuts
         = Use_LikelihoodKDE = Use_LikelihoodMIX = Use_LikelihoodPCA = Use_LikelihoodD = Use_Likelihood
         = Use_PDERSPCA = Use_PDERSD = Use_PDERS
         = Use_KNN
         = Use_MLP = Use_CFMlpANN = Use_TMlpANN
         = Use_HMatrix = Use_Fisher = Use_BDTD = Use_BDT
         = Use_RuleFitTMVA = Use_RuleFitJF
         = Use_SVM_Gauss = Use_SVM_Poly = Use_SVM_Lin
         = Use_FDA_GA = Use_FDA_MC = Use_FDA_SA = Use_FDA_MT = Use_FDA_GAMT = Use_FDA_MCMT
         = 0;

      if (mlist->FindObject( "Cuts" ) != 0) Use_Cuts = 1;
      if (mlist->FindObject( "CutsD" ) != 0) Use_CutsD = 1;
      if (mlist->FindObject( "CutsGA" ) != 0) Use_CutsGA = 1;
      if (mlist->FindObject( "Likelihood" ) != 0) Use_Likelihood = 1;
      if (mlist->FindObject( "LikelihoodD" ) != 0) Use_LikelihoodD = 1;
      if (mlist->FindObject( "LikelihoodPCA" ) != 0) Use_LikelihoodPCA = 1;
      if (mlist->FindObject( "LikelihoodKDE" ) != 0) Use_LikelihoodKDE = 1;
      if (mlist->FindObject( "LikelihoodMIX" ) != 0) Use_LikelihoodMIX = 1;
      if (mlist->FindObject( "PDERSPCA" ) != 0) Use_PDERSPCA = 1;
      if (mlist->FindObject( "PDERSD" ) != 0) Use_PDERSD = 1;
      if (mlist->FindObject( "PDERS" ) != 0) Use_PDERS = 1;
      if (mlist->FindObject( "KNN" ) != 0) Use_KNN = 1;
      if (mlist->FindObject( "HMatrix" ) != 0) Use_HMatrix = 1;
      if (mlist->FindObject( "Fisher" ) != 0) Use_Fisher = 1;
      if (mlist->FindObject( "MLP" ) != 0) Use_MLP = 1;
      if (mlist->FindObject( "CFMlpANN" ) != 0) Use_CFMlpANN = 1;
      if (mlist->FindObject( "TMlpANN" ) != 0) Use_TMlpANN = 1;
      if (mlist->FindObject( "BDTD" ) != 0) Use_BDTD = 1;
      if (mlist->FindObject( "BDT" ) != 0) Use_BDT = 1;
      if (mlist->FindObject( "RuleFitJF" ) != 0) Use_RuleFitJF = 1;
      if (mlist->FindObject( "RuleFitTMVA" ) != 0) Use_RuleFitTMVA = 1;
      if (mlist->FindObject( "SVM_Gauss" ) != 0) Use_SVM_Gauss = 1;
      if (mlist->FindObject( "SVM_Poly" ) != 0) Use_SVM_Poly = 1;
      if (mlist->FindObject( "SVM_Lin" ) != 0) Use_SVM_Lin = 1;
      if (mlist->FindObject( "FDA_MC" ) != 0) Use_FDA_MC = 1;
      if (mlist->FindObject( "FDA_GA" ) != 0) Use_FDA_GA = 1;
      if (mlist->FindObject( "FDA_SA" ) != 0) Use_FDA_SA = 1;
      if (mlist->FindObject( "FDA_MT" ) != 0) Use_FDA_MT = 1;
      if (mlist->FindObject( "FDA_GAMT" ) != 0) Use_FDA_GAMT = 1;
      if (mlist->FindObject( "FDA_MCMT" ) != 0) Use_FDA_MCMT = 1;

      delete mlist;
   }

   std::cout << "Start Test TMVAnalysis" << std::endl
             << "======================" << std::endl
             << std::endl;
   std::cout << "Testing all standard methods may take about 10 minutes of running..." << std::endl;

   // Create a new root output file.
   TString outfileName( "TMVA.root" );
   TFile* outputFile = TFile::Open( outfileName, "RECREATE" );
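   // optional safety check (not in the original macro): stop early if the output
   // file could not be created, e.g. because the working directory is not writable
   if (!outputFile || outputFile->IsZombie()) {
      std::cout << "ERROR: could not create output file " << outfileName.Data() << std::endl;
      exit(1);
   }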

   // Create the factory object. Later you can choose the methods
   // whose performance you'd like to investigate. The factory will
   // then run the performance analysis for you.
   //
   // The first argument is the base of the name of all the
   // weightfiles in the directory weight/
   //
   // The second argument is the output file for the training results
   TMVA::Factory *factory = new TMVA::Factory( "TMVAnalysis", outputFile, Form("!V:%sColor", gROOT->IsBatch()?"!":"") );
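   // the Form() call above expands to "!V:Color" in an interactive session and to
   // "!V:!Color" in batch mode, i.e. coloured log output is switched off for batch jobs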

   // if you wish to modify default settings
   // (please check "src/Config.h" to see all available global options)
   //    (TMVA::gConfig().GetVariablePlotting()).fTimesRMS = 8.0;
   //    (TMVA::gConfig().GetIONames()).fWeightFileDir = "myWeightDirectory";

   if (ReadDataFromAsciiIFormat) {
      // load the signal and background event samples from ascii files
      // format in file must be:
      // var1/F:var2/F:var3/F:var4/F
      // 0.04551 0.59923 0.32400 -0.19170
      // ...

      TString datFileS = "data/toy_sig_lincorr.dat";
      TString datFileB = "data/toy_bkg_lincorr.dat";
      if (!factory->SetInputTrees( datFileS, datFileB )) exit(1);
   }
   else {
      // load the signal and background event samples from ROOT trees
      TFile *input(0);
      TString fname = "./tmva_training.root";
      if (!gSystem->AccessPathName( fname )) {
         // first we try to find the training file in the local directory
         std::cout << "--- TMVAnalysis : accessing " << fname << std::endl;
         input = TFile::Open( fname );
      }
      else {
         // second we try accessing the file via the web from
         // http://root.cern.ch/files/tmva_example.root
         std::cout << "--- TMVAnalysis : accessing tmva_example.root file from http://root.cern.ch/files" << std::endl;
         std::cout << "--- TMVAnalysis : for faster startup you may consider downloading it into your local directory" << std::endl;
         input = TFile::Open( "http://root.cern.ch/files/tmva_example.root" );
      }

      if (!input) {
         std::cout << "ERROR: could not open data file" << std::endl;
         exit(1);
      }

      TTree *signal     = (TTree*)input->Get("sig");
      TTree *background = (TTree*)input->Get("bg");

      // global event weights (see below for setting event-wise weights)
      Double_t signalWeight     = 1.0;
      Double_t backgroundWeight = 1.0;

      factory->AddSignalTree    ( signal,     signalWeight     );
      factory->AddBackgroundTree( background, backgroundWeight );
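      // note: these global tree weights are applied in addition to any per-event
      // weight defined below via SetWeightExpression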
   }

   // Define the input variables that shall be used for the MVA training
   // note that you may also use variable expressions, such as: "3*var1/var2*abs(var3)"
   // [all types of expressions that can also be parsed by TTree::Draw( "expression" )]
   factory->AddVariable("var1",      'F');
   factory->AddVariable("var2",      'F');
   factory->AddVariable("var1+var2", 'F');
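   // 'F' declares the variable as floating point; "var1+var2" is a formula evaluated
   // event by event, using the same expression syntax as TTree::Draw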

   // This would set individual event weights (the variables defined in the
   // expression need to exist in the original TTree)
   factory->SetWeightExpression("event_weight");

   // Apply additional cuts on the signal and background sample.
   TCut mycut = "";
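   // (purely illustrative, not used here: a selection could be applied with e.g.
   //  TCut mycut = "var1 > -2.0";)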

   // tell the factory to use all remaining events in the trees after training for testing:
   TString split_opt = "NSigTrain=3000:NBkgTrain=3000:SplitMode=Random:NormMode=NumEvents:!V";
   factory->PrepareTrainingAndTestTree( mycut, split_opt );
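   // NSigTrain/NBkgTrain request 3000 signal and 3000 background training events,
   // chosen randomly from the input trees (SplitMode=Random); everything else is
   // kept for testing, and !V suppresses verbose output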

   // If no numbers of events are given, half of the events in the tree are used for training, and
   // the other half for testing:
   //    factory->PrepareTrainingAndTestTree( mycut, "SplitMode=random:!V" );
   // To also specify the number of testing events, use:
   //    factory->PrepareTrainingAndTestTree( mycut,
   //       "NSigTrain=3000:NBkgTrain=3000:NSigTest=3000:NBkgTest=3000:SplitMode=Random:!V" );

   // ---- Book MVA methods
   //
   // please lookup the various method configuration options in the corresponding cxx files, eg:
   // src/MethodCuts.cxx, etc.
   // it is possible to preset ranges in the option string in which the cut optimisation should be done:
   // "...:CutRangeMin[2]=-1:CutRangeMax[2]=1:...", where [2] is the third input variable

   // Cut optimisation
   if (Use_Cuts)
      factory->BookMethod( TMVA::Types::kCuts, "Cuts",
                           "!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart" );

   if (Use_CutsD)
      factory->BookMethod( TMVA::Types::kCuts, "CutsD",
                           "!H:!V:FitMethod=MC:EffSel:SampleSize=200000:VarProp=FSmart:VarTransform=Decorrelate" );

   if (Use_CutsGA)
      factory->BookMethod( TMVA::Types::kCuts, "CutsGA",
                           "!H:!V:FitMethod=GA:EffSel:Steps=30:Cycles=3:PopSize=100:SC_steps=10:SC_rate=5:SC_factor=0.95:VarProp=FSmart" );

   // Likelihood
   if (Use_Likelihood)
      factory->BookMethod( TMVA::Types::kLikelihood, "Likelihood",
                           "H:V:!TransformOutput:PDFInterpol=Spline2" );
      // "H:V:!TransformOutput:PDFInterpol=Spline2:\
      //NSmoothSig[0]=10:NSmoothSig[1]=10:NSmoothSig[2]=10:NSmoothSig[3]=100:NSmoothSig[4]=100:NSmoothSig[5]=100:NSmoothSig[6]=100:\
      //NSmoothBkg[0]=10:NSmoothBkg[1]=10:NSmoothBkg[2]=10:NSmoothBkg[3]=100:NSmoothBkg[4]=100:NSmoothBkg[5]=100:NSmoothBkg[6]=100:\
      //NSmooth=10:NAvEvtPerBin=50" );
      //NSmoothSig[0]=10:NSmoothSig[1]=10:NSmoothSig[2]=10:NSmoothSig[3]=10:NSmoothSig[4]=10:NSmoothSig[5]=10:NSmoothSig[6]=10:NSmoothSig[7]=10:NSmoothSig[8]=10:NSmoothSig[9]=10:NSmoothSig[10]=10:NSmoothSig[11]=10:NSmoothSig[12]=10:NSmoothSig[13]=10:NSmoothSig[14]=10:NSmoothSig[15]=10:NSmoothSig[16]=10: \
      //NSmoothBkg[0]=10:NSmoothBkg[1]=10:NSmoothBkg[2]=10:NSmoothBkg[3]=10:NSmoothBkg[4]=10:NSmoothBkg[5]=10:NSmoothBkg[6]=10:NSmoothBkg[7]=10:NSmoothBkg[8]=10:NSmoothBkg[9]=10:NSmoothBkg[10]=10:NSmoothBkg[11]=10:NSmoothBkg[12]=10:NSmoothBkg[13]=10:NSmoothBkg[14]=10:NSmoothBkg[15]=10:NSmoothBkg[16]=10: \

   // test the decorrelated likelihood
   if (Use_LikelihoodD)
      factory->BookMethod( TMVA::Types::kLikelihood, "LikelihoodD",
                           "!H:!V:!TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=100:NSmoothBkg[0]=10:NSmooth=5:NAvEvtPerBin=50:VarTransform=Decorrelate" );

   if (Use_LikelihoodPCA)
      factory->BookMethod( TMVA::Types::kLikelihood, "LikelihoodPCA",
                           "!H:!V:!TransformOutput:PDFInterpol=Spline2:NSmoothSig[0]=100:NSmoothBkg[0]=10:NSmooth=5:NAvEvtPerBin=50:VarTransform=PCA" );

   // test the new kernel density estimator
   if (Use_LikelihoodKDE)
      factory->BookMethod( TMVA::Types::kLikelihood, "LikelihoodKDE",
                           "!H:!V:!TransformOutput:PDFInterpol=KDE:KDEtype=Gauss:KDEiter=Nonadaptive:KDEborder=None:NAvEvtPerBin=50" );

   // test the mixed splines and kernel density estimator (depending on which variable)
   if (Use_LikelihoodMIX)
      factory->BookMethod( TMVA::Types::kLikelihood, "LikelihoodMIX",
                           "!H:!V:!TransformOutput:PDFInterpol[0]=KDE:PDFInterpol[1]=KDE:PDFInterpol[2]=Spline2:PDFInterpol[3]=Spline2:KDEtype=Gauss:KDEiter=Nonadaptive:KDEborder=None:NAvEvtPerBin=50" );

   // PDE - RS method
   if (Use_PDERS)
      factory->BookMethod( TMVA::Types::kPDERS, "PDERS",
                           "!H:!V:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=400:NEventsMax=600:InitialScale=0.99" );

   if (Use_PDERSD)
      factory->BookMethod( TMVA::Types::kPDERS, "PDERSD",
                           "!H:!V:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=400:NEventsMax=600:InitialScale=0.99:VarTransform=Decorrelate" );

   if (Use_PDERSPCA)
      factory->BookMethod( TMVA::Types::kPDERS, "PDERSPCA",
                           "!H:!V:VolumeRangeMode=Adaptive:KernelEstimator=Gauss:GaussSigma=0.3:NEventsMin=400:NEventsMax=600:InitialScale=0.99:VarTransform=PCA" );

   // K-Nearest Neighbour classifier (KNN)
   if (Use_KNN)
      factory->BookMethod( TMVA::Types::kKNN, "KNN",
                           "nkNN=40:TreeOptDepth=6:ScaleFrac=0.8:!UseKernel:!Trim" );
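   // nkNN=40: the discriminant is built from the 40 nearest neighbours of each test event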

   // H-Matrix (chi-squared) method
   if (Use_HMatrix)
      factory->BookMethod( TMVA::Types::kHMatrix, "HMatrix", "!H:!V" );

   // Fisher discriminant
   if (Use_Fisher)
      factory->BookMethod( TMVA::Types::kFisher, "Fisher",
                           "H:!V:!Normalise:CreateMVAPdfs:Fisher:NbinsMVAPdf=50:NsmoothMVAPdf=1" );

   // Function discrimination analysis (FDA) -- test of various fitters - the recommended one is Minuit or GA
   if (Use_FDA_MC)
      factory->BookMethod( TMVA::Types::kFDA, "FDA_MC",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MC:SampleSize=100000:Sigma=0.1" );

   if (Use_FDA_GA)
      factory->BookMethod( TMVA::Types::kFDA, "FDA_GA",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=GA:PopSize=100:Cycles=3:Steps=20:Trim=True:SaveBestGen=0" );

   if (Use_FDA_SA)
      factory->BookMethod( TMVA::Types::kFDA, "FDA_SA",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=SA:MaxCalls=50000:TemperatureGradient=0.7:InitialTemperature=2000000:MinTemperature=500:Eps=1e-04:NFunLoops=5:NEps=4" );

   if (Use_FDA_MT)
      factory->BookMethod( TMVA::Types::kFDA, "FDA_MT",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=2:UseImprove:UseMinos:SetBatch" );

   if (Use_FDA_GAMT)
      factory->BookMethod( TMVA::Types::kFDA, "FDA_GAMT",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=GA:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:Cycles=1:PopSize=5:Steps=5:Trim" );

   if (Use_FDA_MCMT)
      factory->BookMethod( TMVA::Types::kFDA, "FDA_MCMT",
                           "H:!V:Formula=(0)+(1)*x0+(2)*x1+(3)*x2+(4)*x3:ParRanges=(-1,1);(-10,10);(-10,10);(-10,10);(-10,10):FitMethod=MC:Converger=MINUIT:ErrorLevel=1:PrintLevel=-1:FitStrategy=0:!UseImprove:!UseMinos:SetBatch:SampleSize=20" );

   // TMVA ANN: MLP (recommended ANN) -- all ANNs in TMVA are Multilayer Perceptrons
   if (Use_MLP)
      factory->BookMethod( TMVA::Types::kMLP, "MLP", "Normalise:H:!V:NCycles=200:HiddenLayers=N+1,N:TestRate=5" );
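   // HiddenLayers=N+1,N builds two hidden layers with N+1 and N neurons, where N is the
   // number of input variables; NCycles is the number of training epochs, and TestRate=5
   // means the test sample is evaluated every 5th epoch to monitor overtraining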

   // CF(Clermont-Ferrand)ANN
   if (Use_CFMlpANN)
      factory->BookMethod( TMVA::Types::kCFMlpANN, "CFMlpANN", "!H:!V:NCycles=500:HiddenLayers=N+1,N" ); // n_cycles:#nodes:#nodes:...

   // Tmlp(Root)ANN
   if (Use_TMlpANN)
      factory->BookMethod( TMVA::Types::kTMlpANN, "TMlpANN", "!H:!V:NCycles=200:HiddenLayers=N+1,N" ); // n_cycles:#nodes:#nodes:...

   // Support Vector Machines using three different Kernel types (Gauss, polynomial and linear)
   if (Use_SVM_Gauss)
      factory->BookMethod( TMVA::Types::kSVM, "SVM_Gauss", "Sigma=2:C=1:Tol=0.001:Kernel=Gauss" );
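   // Sigma is the width of the Gaussian kernel, C the cost parameter penalising
   // misclassified training events, and Tol the convergence tolerance of the training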

   if (Use_SVM_Poly)
      factory->BookMethod( TMVA::Types::kSVM, "SVM_Poly", "Order=4:Theta=1:C=0.1:Tol=0.001:Kernel=Polynomial" );

   if (Use_SVM_Lin)
      factory->BookMethod( TMVA::Types::kSVM, "SVM_Lin", "!H:!V:Kernel=Linear:C=1:Tol=0.001" );

   // Boosted Decision Trees (second one with decorrelation)
   if (Use_BDT)
      factory->BookMethod( TMVA::Types::kBDT, "BDT",
                           "!H:!V:NTrees=400:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:PruneMethod=CostComplexity:PruneStrength=4.5" );
   if (Use_BDTD)
      factory->BookMethod( TMVA::Types::kBDT, "BDTD",
                           "!H:!V:NTrees=400:BoostType=AdaBoost:SeparationType=GiniIndex:nCuts=20:PruneMethod=CostComplexity:PruneStrength=4.5:VarTransform=Decorrelate" );
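   // both configurations grow 400 trees boosted with AdaBoost, use the Gini index as node
   // separation criterion, scan nCuts=20 grid points per variable for each node split, and
   // apply cost-complexity pruning with strength 4.5; BDTD additionally decorrelates the inputs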

   // RuleFit -- TMVA implementation of Friedman's method
   if (Use_RuleFitTMVA)
      factory->BookMethod( TMVA::Types::kRuleFit, "RuleFitTMVA",
                           "H:!V:RuleFitModule=RFTMVA:Model=ModRuleLinear:MinImp=0.001:RuleMinDist=0.001:NTrees=20:fEventsMin=0.01:fEventsMax=0.5:GDTau=-1.0:GDTauPrec=0.01:GDStep=0.01:GDNSteps=100000:GDErrScale=1.02" );

   // Friedman's RuleFit method, implementation by J. Friedman
   if (Use_RuleFitJF)
      factory->BookMethod( TMVA::Types::kRuleFit, "RuleFitJF",
                           "!V:RuleFitModule=RFFriedman:Model=ModRuleLinear:GDStep=0.01:GDNSteps=10000:GDErrScale=1.1:RFNendnodes=4" );

   // ---- Now you can tell the factory to train, test, and evaluate the MVAs

   // Train MVAs using the set of training events
   factory->TrainAllMethods();

   // ---- Evaluate all MVAs using the set of test events
   factory->TestAllMethods();

   // ----- Evaluate and compare performance of all configured MVAs
   factory->EvaluateAllMethods();

   // --------------------------------------------------------------

   // Save the output
   outputFile->Close();

   std::cout << "==> wrote root file TMVA.root" << std::endl;
   std::cout << "==> TMVAnalysis is done!" << std::endl;

   // Clean up
   delete factory;

   // Launch the GUI for the root macros
   if (!gROOT->IsBatch()) TMVAGui( outfileName );
}