root/cvsroot/UserCode/OSUT3Analysis/Configuration/scripts/makeYieldsTables.py
Revision: 1.3
Committed: Fri Aug 16 14:45:45 2013 UTC by lantonel
Content type: text/x-python
Branch: MAIN
Changes since 1.2: +15 -9 lines
Log Message:
accounted for the possibility that there are different channels included in different output files
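
This revision's change appears below as per-channel bookkeeping: a dataset is listed for a channel only if its output file actually contains that channel's cutflow histogram. A minimal sketch of the resulting structure, with the variable name taken from the script and purely illustrative channel and dataset names:

    # Illustration only: channel and dataset names are placeholders.
    # A dataset whose file lacks a channel's cutflow histogram is simply
    # left out of that channel's yields table.
    processed_datasets_channels = {
        "Preselection":  ["BackgroundA", "BackgroundB", "DataSample"],
        "FullSelection": ["BackgroundA", "BackgroundB"],
    }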

File Contents

#!/usr/bin/env python
import sys
import os
import re
import math
from array import *
from decimal import *
from optparse import OptionParser
from OSUT3Analysis.Configuration.configurationOptions import *
from OSUT3Analysis.Configuration.processingUtilities import *
from OSUT3Analysis.Configuration.formattingUtilities import *



### parse the command-line options

parser = OptionParser()
parser = set_commandline_arguments(parser)

# remove standard options that this script does not use
parser.remove_option("-o")
parser.remove_option("-n")
parser.remove_option("-u")
parser.remove_option("-e")
parser.remove_option("-r")
parser.remove_option("-R")
parser.remove_option("-d")
parser.remove_option("-b")
parser.remove_option("--2D")
parser.remove_option("-y")
parser.remove_option("-p")


(arguments, args) = parser.parse_args()

if arguments.localConfig:
    sys.path.append(os.getcwd())
    # strip a trailing ".py" (rstrip removes characters, not a suffix)
    exec("from " + re.sub(r"\.py$", "", arguments.localConfig) + " import *")

#set condor directory
condor_dir = set_condor_output_dir(arguments)


from ROOT import TFile, TH1F, gDirectory


hLine = "\\hline\n"
endLine = " \\\\ "
newLine = " \n"

#### check which input datasets have valid output files
processed_datasets = []
for dataset in datasets:
    if types[dataset] == "signalMC": #only include bgMC and data yields
        continue
    fileName = condor_dir + "/" + dataset + ".root"
    if not os.path.exists(fileName):
        continue
    testFile = TFile(fileName)
    if not (testFile.IsZombie()):
        processed_datasets.append(dataset)

#### exit if no datasets found
if len(processed_datasets) == 0:
    sys.exit("Can't find any output root files for the given list of datasets")

#open the first ROOT file and get the list of channels
channels = []
dataset_file = "%s/%s.root" % (condor_dir,processed_datasets[0])
inputFile = TFile(dataset_file)
inputFile.cd("OSUAnalysis")

# each TDirectoryFile under OSUAnalysis corresponds to one channel
for key in gDirectory.GetListOfKeys():
    if (key.GetClassName() != "TDirectoryFile"):
        continue
    channels.append(key.GetName())
#get and store the yields and errors for each dataset
yields = {}
errors = {}
bgMCSum = {}
bgMCErrSquared = {}
processed_datasets_channels = {}

for channel in channels:
    bgMCSum[channel] = 0
    bgMCErrSquared[channel] = 0
    processed_datasets_channels[channel] = []

for sample in processed_datasets:
    yields[sample] = {}
    errors[sample] = {}
    dataset_file = "%s/%s.root" % (condor_dir,sample)
    inputFile = TFile(dataset_file)
    for channel in channels:
        cutFlowHistogram = inputFile.Get("OSUAnalysis/"+channel+"CutFlow")
        if not cutFlowHistogram:
            print "WARNING: didn't find cutflow for", sample, "dataset in", channel, "channel"
            continue
        processed_datasets_channels[channel].append(sample)

        # yield and statistical error are taken from the last bin of the cutflow
        yield_ = cutFlowHistogram.GetBinContent(cutFlowHistogram.GetNbinsX())
        error_ = cutFlowHistogram.GetBinError(cutFlowHistogram.GetNbinsX())
        yields[sample][channel] = formatNumber(str(round_sigfigs(yield_,3)).rstrip("0").rstrip("."))
        errors[sample][channel] = formatNumber(str(round_sigfigs(error_,3)).rstrip("0").rstrip("."))

        if types[sample] == "bgMC":
            bgMCSum[channel] = bgMCSum[channel] + yield_
            bgMCErrSquared[channel] = bgMCErrSquared[channel] + error_*error_

    inputFile.Close()


#write a table for each channel to a separate tex file

for channel in channels:
    outputFile = condor_dir + "/yields_" + plainTextString(channel) + ".tex"
    fout = open (outputFile, "w")
    fout.write ("\\makebox[0pt]{\\renewcommand{\\arraystretch}{1.2}\\begin{tabular}{lr}"+newLine+hLine)

    fout.write("Event Source & Event Yield $\pm$ 1$\sigma$ (stat.)"+endLine+newLine+hLine)

    #write a line for each background sample
    bgMCcounter = 0
    for sample in processed_datasets_channels[channel]:
        if types[sample] != "bgMC":
            continue
        bgMCcounter = bgMCcounter + 1
        rawlabel = "$" + labels[sample] + "$"
        label = rawlabel.replace("#","\\").replace("\\rightarrow","{\\rightarrow}").replace(" ","\\ ")
        fout.write(label + " & " + yields[sample][channel] + " $\pm$ " + errors[sample][channel] + endLine + newLine)

    #write a line with the sum of the backgrounds
    if bgMCcounter != 0:

        bgMCSum_ = formatNumber(str(round_sigfigs(bgMCSum[channel],3)).rstrip("0").rstrip("."))
        bgMCErr_ = formatNumber(str(round_sigfigs(math.sqrt(bgMCErrSquared[channel]),3)).rstrip("0").rstrip("."))

        fout.write(hLine+"background sum & " + bgMCSum_ + " $\pm$ " + bgMCErr_ + endLine + newLine + hLine)

    #write a line for each data sample
    for sample in processed_datasets_channels[channel]:
        if types[sample] != "data":
            continue
        rawlabel = "$" + labels[sample] + "$"
        label = rawlabel.replace("#","\\").replace("\\rightarrow","{\\rightarrow}").replace(" ","\\ ")
        fout.write(label + " & " + yields[sample][channel] + " $\pm$ " + errors[sample][channel] + endLine + newLine)

    fout.write("\\end{tabular}}")
    fout.close()
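
For reference, each pass through the loop above writes a fragment of roughly the following shape to <condor_dir>/yields_<channel>.tex; the row labels and numbers here are placeholders, not actual script output:

    \makebox[0pt]{\renewcommand{\arraystretch}{1.2}\begin{tabular}{lr}
    \hline
    Event Source & Event Yield $\pm$ 1$\sigma$ (stat.) \\
    \hline
    % placeholder rows: labels and yields are illustrative only
    $Background\ A$ & 123 $\pm$ 4.5 \\
    $Background\ B$ & 67.8 $\pm$ 2.1 \\
    \hline
    background sum & 191 $\pm$ 4.97 \\
    \hline
    $Data$ & 200 $\pm$ 14.1 \\
    \end{tabular}}

The fragment contains no preamble, so it is presumably meant to be \input into a document that provides one.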