  3   | import os
  4   | import fileinput
  5   | from array import *
  6 < | from optparse import OptionParser
  6 > | import argparse
  7   | from OSUT3Analysis.Configuration.configurationOptions import *
  8   | from OSUT3Analysis.Configuration.processingUtilities import *
  9   |
 10 < | parser = OptionParser()
 10 > | parser = argparse.ArgumentParser()
 11   | parser = set_commandline_arguments(parser)
 12   |
 13 < | (options, args) = parser.parse_args()
 13 > | arguments = parser.parse_args()
 14   |
 15 < | if options.localConfig:
 15 > | if arguments.localConfig:
 16   |     sys.path.append(os.getcwd())
 17 < |     exec("from " + options.localConfig.rstrip('.py') + " import *")
 17 > |     exec("from " + arguments.localConfig.rstrip('.py') + " import *")
 18   |
 19 < | condor_dir = set_condor_output_dir(options)
 19 > | condor_dir = set_condor_output_dir(arguments)
 20   |
 21   | from ROOT import TFile, gROOT, gStyle, gDirectory, TKey
 22   |
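For reference, the optparse-to-argparse switch above boils down to the pattern below; the --localConfig flag is shown only as an assumed example, since the real options are registered inside set_commandline_arguments():

    # Minimal sketch of the change, assuming a hypothetical --localConfig option.
    # Old (optparse): parse_args() returns an (options, args) pair.
    #   parser = OptionParser()
    #   parser.add_option("-l", "--localConfig", dest = "localConfig")
    #   (options, args) = parser.parse_args()
    # New (argparse): parse_args() returns a single namespace.
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("-l", "--localConfig", dest = "localConfig")
    arguments = parser.parse_args()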
 27   | replacements = {
 28   |     ">":"$>$",
 29   |     "<":"$<$",
 30 < |     "eta ":"$\\eta$ ",
 30 > |     " eta ":" $\\eta$ ",
 31 > |     " abs(eta) ":" $|\\eta|$ ",
 32   |     "#":"Num",
 33   |
 34   |     "\\rightarrow":"{\\rightarrow}",
 37   |     "BCtoE QCD":"BCtoE$ $QCD",
 38   |
 39   |
 40 < |     "Pt ":"pt ",
 41 < |     "PT ":"pt ",
 41 < |     "pT ":"pt ",
 42 < |     "pt ":"$p_{T}$ ",
 43 < |
 44 < |     "Ht ":"HT ",
 45 < |     "ht ":"HT ",
 46 < |     "hT ":"HT ",
 47 < |     "HT ":"$H_{T}$ ",
 48 < |     "tig$H_{T}$ ":"tight ",
 40 > |     " pt ":" $p_{T}$ ",
 41 > |     " ht ":"$H_{T}$ ",
 42   |
 50 - |     "D0":"d0",
 43   |     "d0":"$d_{0}$",
 44   |
 45 < |     "MET ":"Met ",
 46 < |     "MEt ":"Met ",
 47 < |     "met ":"Met ",
 48 < |     "Met ":"$\\not\\!\\!{E}_{T}$ ",
 49 < |
 50 < |     "MHT ":"Mht ",
 51 < |     "MHt ":"Mht ",
 52 < |     "mht ":"Mht ",
 53 < |     "Mht ":"$\\not\\!\\!{H}_{T}$ ",
 45 > |     " MET ":" Met ",
 46 > |     " MEt ":" Met ",
 47 > |     " met ":" Met ",
 48 > |     " Met ":"$\\not\\!\\!{E}_{T}$ ",
 49 > |
 50 > |     " MHT ":" Mht ",
 51 > |     " MHt ":" Mht ",
 52 > |     " mht ":" Mht ",
 53 > |     " Mht ":"$\\not\\!\\!{H}_{T}$ ",
 54   |
 55   |     "M_Z" : "$M_{Z}$",
 56   |     "M_mumu" : "$M_{\\mu\\mu}$",
 57   |     "M_ee" : "$M_{ee}$",
 58   |     "M_ll" : "$M_{ll}$",
 59   |
 68 - |     "|" : "$|$"
 60   | }
 61   |
 62   | secondary_replacements = {
 63   |     "$$<$":"$<"
 64 + |
 65   | }
 66   |
 67   |
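The space-padded keys added above appear to be what turns these substitutions into whole-word replacements; that reading is mine, not stated in the diff. Illustration with invented histogram titles:

    # " eta " only fires on a stand-alone token:
    #   "Muon eta distribution"  ->  "Muon $\eta$ distribution"
    #   "Metadata yield"         ->  unchanged (no " eta " token)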
 69   | for dataset in datasets:
 70   |     fileName = condor_dir + "/" + dataset + ".root"
 71   |     if not os.path.exists(fileName):
 72 < |         print "Couldn't find output file for",dataset,"dataset"
 72 > |         #print "Couldn't find output file for",dataset,"dataset"
 73   |         continue
 74   |     testFile = TFile(fileName)
 75   |     if not (testFile.IsZombie()):
 76   |         processed_datasets.append(dataset)
 77   |
 78   | if len(processed_datasets) is 0:
 79 < |     sys.exit("No datasets have been processed")
 79 > |     sys.exit("Can't find any output root files for the given list of datasets")
 80   |
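One untouched context line above is worth flagging while in the area: `len(processed_datasets) is 0` happens to work only because CPython caches small integers; the equality comparison is the reliable spelling.

    # Safer form of the unchanged check at line 78 (not part of this diff):
    if len(processed_datasets) == 0:
        sys.exit("Can't find any output root files for the given list of datasets")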
 81   | #### open first input file and re-make its directory structure in the output file
 82   | testFile = TFile(condor_dir + "/" + processed_datasets[0] + ".root")
 92   |         channels.append(key2.GetName())
 93   |
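Lines 83-91 fall outside this hunk, so only the innermost `channels.append(key2.GetName())` is visible. A minimal sketch of how the channel list is presumably filled from the first file's keys; the nesting and the TDirectoryFile check are assumptions:

    # Sketch only; the real loop lives in the elided lines 83-91.
    channels = []
    for key in testFile.GetListOfKeys():             # top-level directories in the ROOT file
        if key.GetClassName() != "TDirectoryFile":
            continue
        for key2 in key.ReadObj().GetListOfKeys():   # one subdirectory per channel
            channels.append(key2.GetName())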
 94   | fout = open (texfile, "w")
 95 < | fout.write ("\\documentclass{article}\n\n")
 95 > | fout.write ("\\documentclass[a2paper,8pt]{article}\n\n")
 96   | fout.write ("\\usepackage[landscape,margin=0.15cm]{geometry}\n\n")
 97   | fout.write ("\\usepackage{multirow}\n\n")
 98   | fout.write ("\\begin{document}\n\n")
101   |
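A caveat on the new class options: the standard article class only defines 10pt, 11pt and 12pt, so 8pt is reported as an unused global option and the body stays at 10pt. If the smaller font is really wanted, one possibility (not part of this diff) is the extsizes bundle:

    # Hypothetical alternative; extarticle (extsizes bundle) does define an 8pt option.
    fout.write ("\\documentclass[a2paper,8pt]{extarticle}\n\n")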
102   | firstChannel = True
103   |
104 < | for channel in channels: # loop over final states, which each have their own directory
105 < |     fout = open (texfile, "a")
106 < |     if not firstChannel:
107 < |         fout.write ("\\pagebreak\n\n")
108 < |     firstChannel = False
109 < |     fout.write ("\\section{" + channel + " channel}\n\n")
110 < |     fout.close ()
104 > | weight = intLumi / 10000.0
105 > | for dataset in processed_datasets:
106 > |     dataset_file = "%s/%s.root" % (condor_dir,dataset)
107 > |     if types[dataset] != "data":
108 > |         os.system("mergeTFileServiceHistograms -i %s -o %s -w %g" % (dataset_file, dataset_file + "_tmp", weight))
109 > |     else:
110 > |         os.system("mergeTFileServiceHistograms -i %s -o %s -w %g" % (dataset_file, dataset_file + "_tmp", 1.0))
111   |
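The new block above rescales every non-data sample by intLumi / 10000.0 before merging and leaves data untouched. A small helper expressing the same choice; the function and the mc_reference name are sketch-only, while types and intLumi come from the diff:

    # Sketch of the per-dataset scale factor used above.  mc_reference records the
    # assumption that the MC inputs are normalised to 10000 in the same units as intLumi.
    def mergeWeight(dataset, intLumi, mc_reference = 10000.0):
        if types[dataset] == "data":
            return 1.0                      # data histograms are merged unweighted
        return intLumi / mc_reference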
112 < |     args = ""
113 < |     hist = channel + "CutFlow"
112 > | for channel in channels: # loop over final states, which each have their own directory
113 > |     formatted_channel = channel.replace("_"," ")
114 > |     cutFlowArgs = ""
115 > |     selectionArgs = ""
116 > |     minusOneArgs = ""
117   |     #print hist
118   |     for dataset in processed_datasets:
119 < |         dataset_file = "%s/%s.root" % (condor_dir,dataset)
119 > |         dataset_file = "%s/%s.root_tmp" % (condor_dir,dataset)
120   |         #print dataset_file
121 < |         args = args + " " + dataset_file
122 < |         args = args + " " + hist
121 > |         cutFlowArgs = cutFlowArgs + " " + dataset_file
122 > |         selectionArgs = selectionArgs + " " + dataset_file
123 > |         minusOneArgs = minusOneArgs + " " + dataset_file
124 > |         cutFlowArgs = cutFlowArgs + " " + channel + "CutFlow"
125 > |         selectionArgs = selectionArgs + " " + channel + "Selection"
126 > |         minusOneArgs = minusOneArgs + " " + channel + "MinusOne"
127   |
128   |         rawlabel = "$" + labels[dataset] + "$"
129   |         label = rawlabel.replace("#","\\")
130   |         label = "'" + label + "'"
131   |         #print label
132 < |         args = args + " " + label
132 > |         cutFlowArgs = cutFlowArgs + " " + label
133 > |         selectionArgs = selectionArgs + " " + label
134 > |         minusOneArgs = minusOneArgs + " " + label
135   |
136   |
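For clarity, each of the three strings built above ends up as one (file, histogram, label) triple per dataset; schematically, with invented dataset names:

    # Purely illustrative; "ZJets" and "WJets" are made-up datasets.
    # For a channel named "MuMu", cutFlowArgs grows as
    #   " <condor_dir>/ZJets.root_tmp MuMuCutFlow 'Z+jets' <condor_dir>/WJets.root_tmp MuMuCutFlow 'W+jets'"
    # selectionArgs and minusOneArgs differ only in the histogram suffix
    # (MuMuSelection and MuMuMinusOne respectively).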
137   |     #make cutFlowTable objects
138 < |     os.system("cutFlowTable %s >> %s" % (args,texfile))
138 > |     fout = open (texfile, "a")
139 > |     if not firstChannel:
140 > |         fout.write ("\\pagebreak\n\n")
141 > |     firstChannel = False
142 > |     fout.write ("\\section*{" + formatted_channel + " channel}\n\n")
143 > |     fout.write ("\\subsection*{Cut flow}\n\n")
144 > |     fout.close ()
145 > |     os.system("cutFlowTable -l %g -m %s >> %s" % (intLumi,cutFlowArgs,texfile))
146 > |     fout = open (texfile, "a")
147 > |     fout.write ("\\pagebreak\n\n")
148 > |     fout.write ("\\section*{" + formatted_channel + " channel}\n\n")
149 > |     fout.write ("\\subsection*{Individual selection}\n\n")
150 > |     fout.close ()
151 > |     os.system("cutFlowTable -l %g %s >> %s" % (intLumi,selectionArgs,texfile))
152 > |     fout = open (texfile, "a")
153 > |     fout.write ("\\pagebreak\n\n")
154 > |     fout.write ("\\section*{" + formatted_channel + " channel}\n\n")
155 > |     fout.write ("\\subsection*{Minus one}\n\n")
156 > |     fout.close ()
157 > |     os.system("cutFlowTable -l %g %s >> %s" % (intLumi,minusOneArgs,texfile))
158 > |
159 > | for dataset in processed_datasets:
160 > |     dataset_file = "%s/%s.root_tmp" % (condor_dir,dataset)
161 > |     os.remove(dataset_file)
162   |
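The three heading-plus-table blocks added above share one shape; a possible consolidation, purely as a sketch on top of the diff (writeChannelTable and its parameters are invented names, and it assumes texfile, intLumi and formatted_channel are in scope as in the loop above):

    def writeChannelTable(title, flags, tableArgs, needPageBreak):
        fout = open (texfile, "a")
        if needPageBreak:
            fout.write ("\\pagebreak\n\n")
        fout.write ("\\section*{" + formatted_channel + " channel}\n\n")
        fout.write ("\\subsection*{" + title + "}\n\n")
        fout.close ()
        os.system("cutFlowTable -l %g %s %s >> %s" % (intLumi, flags, tableArgs, texfile))

    # e.g. writeChannelTable("Cut flow", "-m", cutFlowArgs, not firstChannel)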
163   | #reformat tex files
164   | for line in fileinput.FileInput(texfile,inplace=1):
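The body of this loop is outside the hunk; presumably it runs the two replacement maps over every line and prints the result back, which fileinput's inplace=1 redirects into the file. A minimal sketch under that assumption:

    # Sketch of an in-place rewrite pass; the real loop body is not shown here.
    for line in fileinput.FileInput(texfile, inplace=1):
        for old, new in replacements.iteritems():
            line = line.replace(old, new)
        for old, new in secondary_replacements.iteritems():
            line = line.replace(old, new)
        print line,        # trailing comma: the line already carries its newline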