#include "tmvaglob.C"

// This macro serves to assess the convergence of the MLP ANN.
// It compares the error estimator for the training and testing samples.
// If overtraining occurred, the estimator for the training sample should
// monotonically decrease, while the estimator for the testing sample should
// show a minimum after which it increases.

// input: - Input file (result from TMVA),
//        - use of TMVA plotting TStyle
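// Usage sketch: a minimal way to run this macro from the ROOT prompt,
// assuming this file is saved as annconvergencetest.C and the TMVA training
// output "TMVA.root" sits in the working directory:
//
//   root [0] .L annconvergencetest.C
//   root [1] annconvergencetest("TMVA.root")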
// forward declaration of the per-directory overload defined below
void annconvergencetest( TDirectory *lhdir );

void annconvergencetest( TString fin = "TMVA.root", Bool_t useTMVAStyle = kTRUE )
{
   // set style and remove existing canvases
   TMVAGlob::Initialize( useTMVAStyle );

   // checks if file with name "fin" is already open, and if not opens one
   TFile* file = TMVAGlob::OpenFile( fin );

   // get all titles of the MLP method
   TList titles;
   UInt_t ninst = TMVAGlob::GetListOfTitles("Method_MLP",titles);
   if (ninst==0) {
      cout << "Could not locate directory 'Method_MLP' in file " << fin << endl;
      return;
   }

   // loop over all titles and draw one convergence plot per MLP instance
   TIter keyIter(&titles);
   TDirectory *lhdir;
   TKey *key;
   while ((key = TMVAGlob::NextKey(keyIter,"TDirectory"))) {
      lhdir = (TDirectory *)key->ReadObj();
      annconvergencetest( lhdir );
   }
}

void annconvergencetest( TDirectory *lhdir )
{
   TString jobName = lhdir->GetName();

   // book a new canvas for each MLP instance, offset on screen by the counter
   static Int_t icanvas = -1;
   icanvas++;
   TCanvas* c = new TCanvas( Form("MLPConvergenceTest_%s",jobName.Data()),
                             Form("MLP Convergence Test, %s",jobName.Data()),
                             100 + (icanvas)*40, 0 + (icanvas+1)*20, 600, 580*0.8 );

   // retrieve the error-estimator histograms for the training and test samples
   TH1* estimatorHistTrain = (TH1*)lhdir->Get( "estimatorHistTrain" );
   TH1* estimatorHistTest  = (TH1*)lhdir->Get( "estimatorHistTest" );
   if (estimatorHistTrain == 0 || estimatorHistTest == 0) {
      cout << "Could not locate the estimator histograms in directory " << jobName << endl;
      return;
   }

   // set a common y-axis range with a 10% margin around both histograms
   Double_t m1  = estimatorHistTrain->GetMaximum();
   Double_t m2  = estimatorHistTest ->GetMaximum();
   Double_t max = TMath::Max( m1, m2 );
   m1 = estimatorHistTrain->GetMinimum();
   m2 = estimatorHistTest ->GetMinimum();
   Double_t min = TMath::Min( m1, m2 );
   estimatorHistTrain->SetMaximum( max + 0.1*(max - min) );
   estimatorHistTrain->SetMinimum( min - 0.1*(max - min) );
   estimatorHistTrain->SetLineColor( 2 );
   estimatorHistTrain->SetLineWidth( 2 );
   estimatorHistTrain->SetTitle( TString("MLP Convergence Test") );

   estimatorHistTest->SetLineColor( 4 );
   estimatorHistTest->SetLineWidth( 2 );

   estimatorHistTrain->GetXaxis()->SetTitle( "Epochs" );
   estimatorHistTrain->GetYaxis()->SetTitle( "Estimator" );
   estimatorHistTrain->GetXaxis()->SetTitleOffset( 1.20 );
   estimatorHistTrain->GetYaxis()->SetTitleOffset( 1.65 );

   estimatorHistTrain->Draw();
   estimatorHistTest ->Draw("same");

   // legend distinguishing the training and test curves
   TLegend *legend = new TLegend( 1 - c->GetRightMargin() - 0.45, 1 - c->GetTopMargin() - 0.20,
                                  1 - c->GetRightMargin() - 0.05, 1 - c->GetTopMargin() - 0.05 );
   legend->AddEntry(estimatorHistTrain,"Training Sample","l");
   legend->AddEntry(estimatorHistTest, "Test Sample","l");
   legend->SetMargin( 0.3 );
   legend->Draw("same");

   c->cd();
   TMVAGlob::plot_logo(); // don't understand why this doesn't work ... :-(
   c->Update();

   // save the canvas to image files in the plots directory
   TString fname = "plots/annconvergencetest";
   TMVAGlob::imgconv( c, fname );
}