1 |
peller |
1.8 |
#!/usr/bin/env python
|
2 |
peller |
1.53 |
import os, sys, ROOT, warnings, pickle
|
3 |
nmohr |
1.68 |
ROOT.gROOT.SetBatch(True)
|
4 |
peller |
1.1 |
from array import array
|
5 |
|
|
from math import sqrt
|
6 |
nmohr |
1.68 |
from copy import copy, deepcopy
|
7 |
peller |
1.1 |
#suppres the EvalInstace conversion warning bug
|
8 |
|
|
warnings.filterwarnings( action='ignore', category=RuntimeWarning, message='creating converter.*' )
|
9 |
nmohr |
1.31 |
from optparse import OptionParser
|
10 |
nmohr |
1.80 |
from myutils import BetterConfigParser, Sample, progbar, printc, ParseInfo, Rebinner, HistoMaker
|
11 |
peller |
1.1 |
|
12 |
nmohr |
1.68 |
#--CONFIGURE---------------------------------------------------------------------
|
13 |
nmohr |
1.31 |
argv = sys.argv
|
14 |
|
|
parser = OptionParser()
|
15 |
bortigno |
1.79 |
parser.add_option("-V", "--variable", dest="variable", default="",
|
16 |
nmohr |
1.31 |
help="variable for shape analysis")
|
17 |
|
|
parser.add_option("-C", "--config", dest="config", default=[], action="append",
|
18 |
|
|
help="configuration file")
|
19 |
|
|
(opts, args) = parser.parse_args(argv)
|
20 |
nmohr |
1.20 |
config = BetterConfigParser()
|
21 |
nmohr |
1.31 |
config.read(opts.config)
|
22 |
nmohr |
1.68 |
var=opts.variable
|
23 |
|
|
#-------------------------------------------------------------------------------
|
24 |
|
|
|
25 |
|
|
#--read variables from config---------------------------------------------------
|
26 |
|
|
# 7 or 8TeV Analysis
|
27 |
nmohr |
1.31 |
anaTag = config.get("Analysis","tag")
|
28 |
nmohr |
1.68 |
if not any([anaTag == '7TeV',anaTag == '8TeV']):
|
29 |
|
|
raise Exception("anaTag %s unknown. Specify 8TeV or 7TeV in the general config"%anaTag)
|
30 |
|
|
# Directories:
|
31 |
peller |
1.1 |
Wdir=config.get('Directories','Wdir')
|
32 |
peller |
1.42 |
vhbbpath=config.get('Directories','vhbbpath')
|
33 |
nmohr |
1.46 |
samplesinfo=config.get('Directories','samplesinfo')
|
34 |
peller |
1.67 |
path = config.get('Directories','dcSamples')
|
35 |
nmohr |
1.68 |
outpath=config.get('Directories','limits')
|
36 |
|
|
# Create the output directory for datacards/workspaces if it is missing.
try:
    os.stat(outpath)
except OSError:
    # Narrowed from a bare `except:` which also swallowed
    # KeyboardInterrupt/SystemExit; only a missing path lands here.
    os.mkdir(outpath)
|
40 |
|
|
# parse histogram config:
|
41 |
nmohr |
1.80 |
treevar = config.get('dc:%s'%var,'var')
|
42 |
|
|
name = config.get('dc:%s'%var,'wsVarName')
|
43 |
|
|
title = name
|
44 |
|
|
nBins = int(config.get('dc:%s'%var,'range').split(',')[0])
|
45 |
|
|
xMin = float(config.get('dc:%s'%var,'range').split(',')[1])
|
46 |
|
|
xMax = float(config.get('dc:%s'%var,'range').split(',')[2])
|
47 |
|
|
ROOToutname = config.get('dc:%s'%var,'dcName')
|
48 |
|
|
RCut = config.get('dc:%s'%var,'cut')
|
49 |
|
|
signals = config.get('dc:%s'%var,'signal').split(' ')
|
50 |
|
|
datas = config.get('dc:%s'%var,'dcBin')
|
51 |
|
|
Datacardbin=config.get('dc:%s'%var,'dcBin')
|
52 |
|
|
anType = config.get('dc:%s'%var,'type')
|
53 |
peller |
1.29 |
setup=eval(config.get('LimitGeneral','setup'))
|
54 |
nmohr |
1.68 |
#Systematics:
|
55 |
|
|
if config.has_option('LimitGeneral','addSample_sys'):
|
56 |
|
|
addSample_sys = eval(config.get('LimitGeneral','addSample_sys'))
|
57 |
|
|
additionals = [addSample_sys[key] for key in addSample_sys]
|
58 |
|
|
else:
|
59 |
|
|
addSample_sys = None
|
60 |
|
|
additionals = []
|
61 |
|
|
#find out if BDT or MJJ:
|
62 |
|
|
bdt = False
|
63 |
|
|
mjj = False
|
64 |
bortigno |
1.78 |
cr = False
|
65 |
nmohr |
1.68 |
if str(anType) == 'BDT':
|
66 |
|
|
bdt = True
|
67 |
|
|
systematics = eval(config.get('LimitGeneral','sys_BDT'))
|
68 |
|
|
elif str(anType) == 'Mjj':
|
69 |
|
|
mjj = True
|
70 |
|
|
systematics = eval(config.get('LimitGeneral','sys_Mjj'))
|
71 |
bortigno |
1.78 |
elif str(anType) == 'cr':
|
72 |
|
|
cr = True
|
73 |
|
|
systematics = eval(config.get('LimitGeneral','sys_cr'))
|
74 |
|
|
|
75 |
nmohr |
1.68 |
sys_cut_suffix=eval(config.get('LimitGeneral','sys_cut_suffix'))
|
76 |
peller |
1.86 |
sys_cut_include=[]
|
77 |
|
|
if config.has_option('LimitGeneral','sys_cut_include'):
|
78 |
|
|
sys_cut_include=eval(config.get('LimitGeneral','sys_cut_include'))
|
79 |
nmohr |
1.68 |
systematicsnaming = eval(config.get('LimitGeneral','systematicsnaming'))
|
80 |
|
|
sys_factor_dict = eval(config.get('LimitGeneral','sys_factor'))
|
81 |
|
|
sys_affecting = eval(config.get('LimitGeneral','sys_affecting'))
|
82 |
|
|
# weightF:
|
83 |
|
|
weightF = config.get('Weights','weightF')
|
84 |
nmohr |
1.84 |
weightF_systematics = eval(config.get('LimitGeneral','weightF_sys'))
|
85 |
peller |
1.77 |
# rescale stat shapes by sqrtN
|
86 |
|
|
rescaleSqrtN=eval(config.get('LimitGeneral','rescaleSqrtN'))
|
87 |
nmohr |
1.68 |
# get nominal cutstring:
|
88 |
|
|
treecut = config.get('Cuts',RCut)
|
89 |
|
|
# Train flag: splitting of samples
|
90 |
|
|
TrainFlag = eval(config.get('Analysis','TrainFlag'))
|
91 |
peller |
1.75 |
# toy data option:
|
92 |
|
|
toy=eval(config.get('LimitGeneral','toy'))
|
93 |
nmohr |
1.68 |
# blind data option:
|
94 |
|
|
blind=eval(config.get('LimitGeneral','blind'))
|
95 |
nmohr |
1.81 |
# additional blinding cut:
|
96 |
|
|
addBlindingCut = None
|
97 |
|
|
if config.has_option('LimitGeneral','addBlindingCut'):
|
98 |
|
|
addBlindingCut = config.get('LimitGeneral','addBlindingCut')
|
99 |
peller |
1.89 |
print 'adding add. blinding cut'
|
100 |
peller |
1.83 |
#change nominal shapes by syst
|
101 |
|
|
change_shapes = None
|
102 |
|
|
if config.has_option('LimitGeneral','change_shapes'):
|
103 |
|
|
change_shapes = eval(config.get('LimitGeneral','change_shapes'))
|
104 |
|
|
print 'changing the shapes'
|
105 |
bortigno |
1.78 |
#on control region cr never blind. Overwrite whatever is in the config
|
106 |
|
|
if str(anType) == 'cr':
|
107 |
|
|
if blind:
|
108 |
|
|
print '@WARNING: Changing blind to false since you are running for control region.'
|
109 |
|
|
blind = False
|
110 |
nmohr |
1.68 |
if blind:
|
111 |
bortigno |
1.78 |
printc('red','', 'I AM BLINDED!')
|
112 |
nmohr |
1.68 |
#get List of backgrounds in use:
|
113 |
|
|
backgrounds = eval(config.get('LimitGeneral','BKG'))
|
114 |
|
|
#Groups for adding samples together
|
115 |
peller |
1.75 |
GroupDict = eval(config.get('LimitGeneral','Group'))
|
116 |
nmohr |
1.68 |
#naming for DC
|
117 |
|
|
Dict= eval(config.get('LimitGeneral','Dict'))
|
118 |
|
|
#treating statistics bin-by-bin:
|
119 |
|
|
binstat = eval(config.get('LimitGeneral','binstat'))
|
120 |
|
|
# Use the rebinning:
|
121 |
|
|
rebin_active=eval(config.get('LimitGeneral','rebin_active'))
|
122 |
bortigno |
1.78 |
if str(anType) == 'cr':
|
123 |
|
|
if rebin_active:
|
124 |
|
|
print '@WARNING: Changing rebin_active to false since you are running for control region.'
|
125 |
|
|
rebin_active = False
|
126 |
peller |
1.77 |
# ignore stat shapes
|
127 |
|
|
ignore_stats = eval(config.get('LimitGeneral','ignore_stats'))
|
128 |
peller |
1.72 |
#max_rel = float(config.get('LimitGeneral','rebin_max_rel'))
|
129 |
nmohr |
1.68 |
signal_inject=config.get('LimitGeneral','signal_inject')
|
130 |
|
|
# add signal as background
|
131 |
|
|
add_signal_as_bkg=config.get('LimitGeneral','add_signal_as_bkg')
|
132 |
|
|
if not add_signal_as_bkg == 'None':
|
133 |
|
|
setup.append(add_signal_as_bkg)
|
134 |
|
|
#----------------------------------------------------------------------------
|
135 |
peller |
1.65 |
|
136 |
nmohr |
1.68 |
#--Setup--------------------------------------------------------------------
|
137 |
|
|
#Assign Pt region for sys factors
|
138 |
peller |
1.58 |
if 'HighPtLooseBTag' in ROOToutname:
|
139 |
|
|
pt_region = 'HighPtLooseBTag'
|
140 |
nmohr |
1.90 |
elif 'HighPt' in ROOToutname or 'highPt' in ROOToutname:
|
141 |
peller |
1.58 |
pt_region = 'HighPt'
|
142 |
nmohr |
1.90 |
elif 'MedPt' in ROOToutname:
|
143 |
|
|
pt_region = 'MedPt'
|
144 |
peller |
1.60 |
elif 'LowPt' in ROOToutname or 'lowPt' in ROOToutname:
|
145 |
peller |
1.58 |
pt_region = 'LowPt'
|
146 |
nmohr |
1.68 |
elif 'ATLAS' in ROOToutname:
|
147 |
|
|
pt_region = 'HighPt'
|
148 |
nmohr |
1.80 |
elif 'MJJ' in ROOToutname:
|
149 |
nmohr |
1.68 |
pt_region = 'HighPt'
|
150 |
|
|
else:
|
151 |
peller |
1.58 |
print "Unknown Pt region"
|
152 |
|
|
sys.exit("Unknown Pt region")
|
153 |
nmohr |
1.68 |
# Set rescale factor of 2 in case of TrainFalg
|
154 |
peller |
1.44 |
if TrainFlag:
|
155 |
|
|
MC_rescale_factor=2.
|
156 |
|
|
print 'I RESCALE BY 2.0'
|
157 |
|
|
else: MC_rescale_factor = 1.
|
158 |
nmohr |
1.68 |
#systematics up/down
|
159 |
|
|
UD = ['Up','Down']
|
160 |
|
|
#Parse samples configuration
|
161 |
|
|
info = ParseInfo(samplesinfo,path)
|
162 |
|
|
# get all the treeCut sets
|
163 |
|
|
# create different sample Lists
|
164 |
|
|
all_samples = info.get_samples(signals+backgrounds+additionals)
|
165 |
|
|
signal_samples = info.get_samples(signals)
|
166 |
|
|
background_samples = info.get_samples(backgrounds)
|
167 |
nmohr |
1.80 |
data_sample_names = config.get('dc:%s'%var,'data').split(' ')
|
168 |
peller |
1.77 |
data_samples = info.get_samples(data_sample_names)
|
169 |
nmohr |
1.68 |
#-------------------------------------------------------------------------------------------------
|
170 |
|
|
|
171 |
|
|
optionsList=[]
|
172 |
|
|
|
173 |
nmohr |
1.82 |
def appendList():
    """Snapshot the current module-level option state as one histogram job.

    Copies the mutable globals (_cut, _treevar, _name, _weight) so later
    reassignments do not alter entries already stored in optionsList.
    """
    optionsList.append({
        'cut':    copy(_cut),
        'var':    copy(_treevar),
        'name':   copy(_name),
        'nBins':  nBins,
        'xMin':   xMin,
        'xMax':   xMax,
        'weight': copy(_weight),
        'blind':  blind,
    })
|
174 |
nmohr |
1.68 |
|
175 |
|
|
# Nominal entry: unmodified cut, tree variable, name and weight.
_cut, _treevar, _name, _weight = treecut, treevar, title, weightF
appendList()
|
181 |
|
|
|
182 |
|
|
#the 4 sys
|
183 |
|
|
for syst in systematics:
|
184 |
|
|
for Q in UD:
|
185 |
|
|
#default:
|
186 |
|
|
_cut = treecut
|
187 |
|
|
_name = title
|
188 |
|
|
_weight = weightF
|
189 |
|
|
#replace cut string
|
190 |
|
|
new_cut=sys_cut_suffix[syst]
|
191 |
|
|
if not new_cut == 'nominal':
|
192 |
|
|
old_str,new_str=new_cut.split('>')
|
193 |
|
|
_cut = treecut.replace(old_str,new_str.replace('?',Q))
|
194 |
|
|
_name = title
|
195 |
|
|
_weight = weightF
|
196 |
|
|
#replace tree variable
|
197 |
|
|
if bdt == True:
|
198 |
peller |
1.77 |
#ff[1]='%s_%s'%(sys,Q.lower())
|
199 |
|
|
_treevar = treevar.replace('.nominal','.%s_%s'%(syst,Q.lower()))
|
200 |
|
|
print _treevar
|
201 |
nmohr |
1.68 |
elif mjj == True:
|
202 |
nmohr |
1.80 |
if syst == 'JER' or syst == 'JES':
|
203 |
|
|
_treevar = 'H_%s.mass_%s'%(syst,Q.lower())
|
204 |
nmohr |
1.68 |
else:
|
205 |
|
|
_treevar = treevar
|
206 |
bortigno |
1.78 |
elif cr == True:
|
207 |
nmohr |
1.80 |
if syst == 'beff' or syst == 'bmis' or syst == 'beff1':
|
208 |
|
|
_treevar = treevar.replace(old_str,new_str.replace('?',Q))
|
209 |
bortigno |
1.78 |
else:
|
210 |
|
|
_treevar = treevar
|
211 |
nmohr |
1.68 |
#append
|
212 |
|
|
appendList()
|
213 |
|
|
|
214 |
|
|
# UEPS / weight-based systematics: nominal cut, variable and name, but an
# Up/Down weight expression read from the [Weights] config section.
for weightF_sys in weightF_systematics:
    for _direction in ['UP', 'DOWN']:
        _weight = config.get('Weights', '%s_%s' % (weightF_sys, _direction))
        _cut = treecut
        _treevar = treevar
        _name = title
        appendList()
|
221 |
|
|
|
222 |
|
|
#for option in optionsList:
|
223 |
|
|
# print option['cut']
|
224 |
|
|
|
225 |
peller |
1.75 |
mc_hMaker = HistoMaker(all_samples,path,config,optionsList,GroupDict)
|
226 |
nmohr |
1.68 |
data_hMaker = HistoMaker(data_samples,path,config,[optionsList[0]])
|
227 |
|
|
# Luminosity: average job.lumi over the data samples in use, then hand the
# value to both histogram makers.
_data_lumis = [float(job.lumi) for job in data_samples]
nData = len(_data_lumis)
lumi = sum(_data_lumis)

if nData > 1:
    lumi /= float(nData)

mc_hMaker.lumi = lumi
data_hMaker.lumi = lumi
|
239 |
peller |
1.62 |
|
240 |
nmohr |
1.81 |
if addBlindingCut:
|
241 |
|
|
for i in range(len(mc_hMaker.optionsList)):
|
242 |
|
|
mc_hMaker.optionsList[i]['cut'] += ' & %s' %addBlindingCut
|
243 |
|
|
for i in range(len(data_hMaker.optionsList)):
|
244 |
|
|
data_hMaker.optionsList[i]['cut'] += ' & %s' %addBlindingCut
|
245 |
peller |
1.69 |
|
246 |
nmohr |
1.82 |
|
247 |
peller |
1.72 |
if rebin_active:
|
248 |
|
|
mc_hMaker.calc_rebin(background_samples)
|
249 |
|
|
#transfer rebinning info to data maker
|
250 |
peller |
1.74 |
data_hMaker.norebin_nBins = copy(mc_hMaker.norebin_nBins)
|
251 |
|
|
data_hMaker.rebin_nBins = copy(mc_hMaker.rebin_nBins)
|
252 |
peller |
1.77 |
data_hMaker.nBins = copy(mc_hMaker.nBins)
|
253 |
|
|
data_hMaker._rebin = copy(mc_hMaker._rebin)
|
254 |
peller |
1.74 |
data_hMaker.mybinning = deepcopy(mc_hMaker.mybinning)
|
255 |
peller |
1.69 |
|
256 |
nmohr |
1.68 |
all_histos = {}
|
257 |
|
|
data_histos = {}
|
258 |
peller |
1.53 |
|
259 |
peller |
1.75 |
print '\n\t...fetching histos...'
|
260 |
|
|
|
261 |
nmohr |
1.68 |
for job in all_samples:
|
262 |
peller |
1.75 |
print '\t- %s'%job
|
263 |
nmohr |
1.88 |
if not GroupDict[job.name] in sys_cut_include:
|
264 |
peller |
1.86 |
# manual overwrite
|
265 |
peller |
1.89 |
if addBlindingCut:
|
266 |
|
|
all_histos[job.name] = mc_hMaker.get_histos_from_tree(job,treecut+'& %s'%addBlindingCut)
|
267 |
|
|
else:
|
268 |
|
|
all_histos[job.name] = mc_hMaker.get_histos_from_tree(job,treecut)
|
269 |
peller |
1.86 |
else:
|
270 |
|
|
all_histos[job.name] = mc_hMaker.get_histos_from_tree(job)
|
271 |
peller |
1.53 |
|
272 |
nmohr |
1.68 |
for job in data_samples:
|
273 |
peller |
1.75 |
print '\t- %s'%job
|
274 |
nmohr |
1.68 |
data_histos[job.name] = data_hMaker.get_histos_from_tree(job)[0]['DATA']
|
275 |
peller |
1.23 |
|
276 |
peller |
1.75 |
print '\t> done <\n'
|
277 |
|
|
|
278 |
|
|
i=0
|
279 |
|
|
for job in background_samples:
|
280 |
|
|
print job.name
|
281 |
|
|
htree = all_histos[job.name][0].values()[0]
|
282 |
|
|
if not i:
|
283 |
|
|
hDummy = copy(htree)
|
284 |
|
|
else:
|
285 |
|
|
hDummy.Add(htree,1)
|
286 |
|
|
del htree
|
287 |
|
|
i+=1
|
288 |
|
|
|
289 |
nmohr |
1.68 |
# Sum all data histograms into the single observed-data histogram theData.
# BUGFIX: the counter was never incremented, so theData was silently
# overwritten on every iteration; and the Add() branch indexed the dict
# with the stale loop variable 'i' left over from the background loop
# instead of the current key 'job'.
nData = 0
for job in data_histos:
    if nData == 0:
        theData = data_histos[job]
    else:
        theData.Add(data_histos[job])
    nData += 1
|
295 |
peller |
1.11 |
|
296 |
nmohr |
1.68 |
#-- Write Files-----------------------------------------------------------------------------------
|
297 |
|
|
# generate the TH outfile:
|
298 |
|
|
outfile = ROOT.TFile(outpath+'vhbb_TH_'+ROOToutname+'.root', 'RECREATE')
|
299 |
|
|
outfile.mkdir(Datacardbin,Datacardbin)
|
300 |
|
|
outfile.cd(Datacardbin)
|
301 |
|
|
# generate the Workspace:
|
302 |
peller |
1.62 |
WS = ROOT.RooWorkspace('%s'%Datacardbin,'%s'%Datacardbin) #Zee
|
303 |
peller |
1.1 |
print 'WS initialized'
|
304 |
bortigno |
1.7 |
disc= ROOT.RooRealVar(name,name,xMin,xMax)
|
305 |
peller |
1.1 |
obs = ROOT.RooArgList(disc)
|
306 |
nmohr |
1.68 |
#
|
307 |
peller |
1.1 |
ROOT.gROOT.SetStyle("Plain")
|
308 |
peller |
1.53 |
|
309 |
nmohr |
1.68 |
#order and add all together
|
310 |
|
|
final_histos = {}
|
311 |
peller |
1.57 |
|
312 |
nmohr |
1.68 |
print '\n\t--> Ordering and Adding Histos\n'
|
313 |
peller |
1.53 |
|
314 |
nmohr |
1.68 |
#NOMINAL:
|
315 |
|
|
final_histos['nominal'] = HistoMaker.orderandadd([all_histos['%s'%job][0] for job in all_samples],setup)
|
316 |
peller |
1.53 |
|
317 |
nmohr |
1.68 |
#SYSTEMATICS:
|
318 |
|
|
ind = 1
|
319 |
|
|
for syst in systematics:
|
320 |
|
|
for Q in UD:
|
321 |
nmohr |
1.71 |
final_histos['%s_%s'%(systematicsnaming[syst],Q)] = HistoMaker.orderandadd([all_histos[job.name][ind] for job in all_samples],setup)
|
322 |
nmohr |
1.68 |
ind+=1
|
323 |
nmohr |
1.84 |
for weightF_sys in weightF_systematics:
|
324 |
nmohr |
1.68 |
for Q in UD:
|
325 |
nmohr |
1.84 |
final_histos['%s_%s'%(systematicsnaming[weightF_sys],Q)]= HistoMaker.orderandadd([all_histos[job.name][ind] for job in all_samples],setup)
|
326 |
nmohr |
1.68 |
ind+=1
|
327 |
|
|
|
328 |
peller |
1.83 |
if change_shapes:
|
329 |
|
|
for key in change_shapes:
|
330 |
|
|
syst,val=change_shapes[key].split('*')
|
331 |
peller |
1.85 |
final_histos[syst][key].Scale(float(val))
|
332 |
|
|
print 'scaled %s times %s val'%(syst,val)
|
333 |
peller |
1.83 |
|
334 |
|
|
|
335 |
nmohr |
1.71 |
def get_alternate_shape(hNominal,hAlternate):
    """Build the Up/Down histogram pair for an alternate-sample systematic.

    Up is the alternate shape itself; Down is the nominal mirrored about
    itself, 2*nominal - alternate, with negative bin contents clipped to
    zero. Both results are clones; the inputs are left untouched.
    """
    hUp = hAlternate.Clone()
    hDiff = hNominal.Clone()
    hDown = hDiff.Clone()
    hDiff.Add(hUp, -1.)   # nominal - alternate
    hDown.Add(hDiff)      # nominal + (nominal - alternate)
    # clip any negative bin content to zero
    for b in range(0, hNominal.GetNbinsX() + 1):
        if hDown.GetBinContent(b) < 0.:
            hDown.SetBinContent(b, 0.)
    return hUp, hDown
|
344 |
|
|
|
345 |
|
|
def get_alternate_shapes(all_histos,asample_dict,all_samples):
    """Return (up, down) lists of {name: histo} dicts for the model systematic.

    Samples listed in asample_dict get their Up/Down variations from
    get_alternate_shape(); every other sample is passed through with
    clones of its nominal shape in both variations.
    """
    alternate_shapes_up = []
    alternate_shapes_down = []
    for job in all_samples:
        nominal = all_histos[job.name][0]
        key = nominal.keys()[0]
        if job.name in asample_dict:
            alternate = copy(all_histos[asample_dict[job.name]][0])
            hUp, hDown = get_alternate_shape(nominal[key], alternate[alternate.keys()[0]])
            alternate_shapes_up.append({key: hUp})
            alternate_shapes_down.append({key: hDown})
        else:
            # No alternate sample: both variations are clones of the nominal.
            # (A dead local 'newh' Clone with a '<name>_model_Up' title was
            # removed here; it was never used.)
            alternate_shapes_up.append({key: nominal[key].Clone()})
            alternate_shapes_down.append({key: nominal[key].Clone()})
    return alternate_shapes_up, alternate_shapes_down
|
360 |
|
|
|
361 |
|
|
if addSample_sys:
|
362 |
|
|
aUp, aDown = get_alternate_shapes(all_histos,addSample_sys,all_samples)
|
363 |
|
|
final_histos['%s_Up'%(systematicsnaming['model'])]= HistoMaker.orderandadd(aUp,setup)
|
364 |
peller |
1.74 |
del aUp
|
365 |
nmohr |
1.71 |
final_histos['%s_Down'%(systematicsnaming['model'])]= HistoMaker.orderandadd(aDown,setup)
|
366 |
|
|
|
367 |
peller |
1.77 |
|
368 |
|
|
if not ignore_stats:
|
369 |
|
|
#make statistical shapes:
|
370 |
peller |
1.87 |
if not binstat:
|
371 |
peller |
1.77 |
for Q in UD:
|
372 |
peller |
1.87 |
final_histos['%s_%s'%(systematicsnaming['stats'],Q)] = {}
|
373 |
|
|
for job,hist in final_histos['nominal'].items():
|
374 |
|
|
errorsum=0
|
375 |
peller |
1.77 |
for j in range(hist.GetNbinsX()+1):
|
376 |
peller |
1.87 |
errorsum=errorsum+(hist.GetBinError(j))**2
|
377 |
|
|
errorsum=sqrt(errorsum)
|
378 |
|
|
total=hist.Integral()
|
379 |
|
|
for Q in UD:
|
380 |
|
|
final_histos['%s_%s'%(systematicsnaming['stats'],Q)][job] = hist.Clone()
|
381 |
|
|
for j in range(hist.GetNbinsX()+1):
|
382 |
|
|
if Q == 'Up':
|
383 |
|
|
if rescaleSqrtN and total:
|
384 |
|
|
final_histos['%s_%s'%(systematicsnaming['stats'],Q)][job].SetBinContent(j,max(0,hist.GetBinContent(j)+hist.GetBinError(j)/total*errorsum))
|
385 |
|
|
else:
|
386 |
|
|
final_histos['%s_%s'%(systematicsnaming['stats'],Q)][job].SetBinContent(j,max(0,hist.GetBinContent(j)+hist.GetBinError(j)))
|
387 |
|
|
if Q == 'Down':
|
388 |
|
|
if rescaleSqrtN and total:
|
389 |
|
|
final_histos['%s_%s'%(systematicsnaming['stats'],Q)][job].SetBinContent(j,max(0,hist.GetBinContent(j)-hist.GetBinError(j)/total*errorsum))
|
390 |
|
|
else:
|
391 |
|
|
final_histos['%s_%s'%(systematicsnaming['stats'],Q)][job].SetBinContent(j,max(0,hist.GetBinContent(j)-hist.GetBinError(j)))
|
392 |
|
|
else:
|
393 |
nmohr |
1.91 |
binsBelowThreshold = {}
|
394 |
peller |
1.87 |
for bin in range(0,nBins):
|
395 |
|
|
for Q in UD:
|
396 |
|
|
final_histos['%s_bin%s_%s'%(systematicsnaming['stats'],bin,Q)] = {}
|
397 |
|
|
for job,hist in final_histos['nominal'].items():
|
398 |
nmohr |
1.91 |
binsBelowThreshold[job] = []
|
399 |
|
|
if hist.GetBinContent(bin) > 0.:
|
400 |
|
|
if hist.GetBinError(bin)/sqrt(hist.GetBinContent(bin)) > 0.5 and hist.GetBinContent(bin) >= 1.:
|
401 |
|
|
binsBelowThreshold[job].append(bin)
|
402 |
|
|
elif hist.GetBinError(bin)/(hist.GetBinContent(bin)) > 0.5 and hist.GetBinContent(bin) < 1.:
|
403 |
|
|
binsBelowThreshold[job].append(bin)
|
404 |
peller |
1.87 |
for Q in UD:
|
405 |
|
|
final_histos['%s_bin%s_%s'%(systematicsnaming['stats'],bin,Q)][job] = hist.Clone()
|
406 |
|
|
if Q == 'Up':
|
407 |
|
|
final_histos['%s_bin%s_%s'%(systematicsnaming['stats'],bin,Q)][job].SetBinContent(bin,max(0,hist.GetBinContent(bin)+hist.GetBinError(bin)))
|
408 |
|
|
if Q == 'Down':
|
409 |
|
|
final_histos['%s_bin%s_%s'%(systematicsnaming['stats'],bin,Q)][job].SetBinContent(bin,max(0,hist.GetBinContent(bin)-hist.GetBinError(bin)))
|
410 |
peller |
1.83 |
|
411 |
|
|
|
412 |
nmohr |
1.68 |
#write shapes in WS:
|
413 |
|
|
for key in final_histos:
|
414 |
|
|
for job, hist in final_histos[key].items():
|
415 |
|
|
if 'nominal' == key:
|
416 |
nmohr |
1.71 |
hist.SetName('%s'%(Dict[job]))
|
417 |
peller |
1.70 |
hist.Write()
|
418 |
nmohr |
1.71 |
rooDataHist = ROOT.RooDataHist('%s' %(Dict[job]),'%s'%(Dict[job]),obs, hist)
|
419 |
nmohr |
1.68 |
getattr(WS,'import')(rooDataHist)
|
420 |
|
|
for Q in UD:
|
421 |
|
|
if Q in key:
|
422 |
|
|
theSyst = key.replace('_%s'%Q,'')
|
423 |
|
|
else:
|
424 |
|
|
continue
|
425 |
nmohr |
1.71 |
if systematicsnaming['stats'] in key:
|
426 |
|
|
nameSyst = '%s_%s_%s' %(theSyst,Dict[job],Datacardbin)
|
427 |
|
|
elif systematicsnaming['model'] in key:
|
428 |
|
|
nameSyst = '%s_%s' %(theSyst,Dict[job])
|
429 |
nmohr |
1.68 |
else:
|
430 |
|
|
nameSyst = theSyst
|
431 |
nmohr |
1.71 |
hist.SetName('%s%s%s' %(Dict[job],nameSyst,Q))
|
432 |
peller |
1.70 |
hist.Write()
|
433 |
nmohr |
1.71 |
rooDataHist = ROOT.RooDataHist('%s%s%s' %(Dict[job],nameSyst,Q),'%s%s%s'%(Dict[job],nameSyst,Q),obs, hist)
|
434 |
nmohr |
1.68 |
getattr(WS,'import')(rooDataHist)
|
435 |
peller |
1.48 |
|
436 |
nmohr |
1.82 |
if toy:
|
437 |
|
|
hDummy.SetName('data_obs')
|
438 |
|
|
hDummy.Write()
|
439 |
|
|
rooDataHist = ROOT.RooDataHist('data_obs','data_obs',obs, hDummy)
|
440 |
peller |
1.75 |
else:
|
441 |
nmohr |
1.82 |
theData.SetName('data_obs')
|
442 |
|
|
theData.Write()
|
443 |
|
|
rooDataHist = ROOT.RooDataHist('data_obs','data_obs',obs, theData)
|
444 |
peller |
1.75 |
|
445 |
nmohr |
1.68 |
getattr(WS,'import')(rooDataHist)
|
446 |
peller |
1.13 |
|
447 |
nmohr |
1.68 |
WS.writeToFile(outpath+'vhbb_WS_'+ROOToutname+'.root')
|
448 |
peller |
1.47 |
|
449 |
nmohr |
1.68 |
# now we have a Dict final_histos with sets of all grouped MCs for all systematics:
|
450 |
|
|
# nominal, ($SYS_Up/Down)*4, weightF_sys_Up/Down, stats_Up/Down
|
451 |
peller |
1.36 |
|
452 |
peller |
1.69 |
print '\n\t >>> PRINTOUT PRETTY TABLE <<<\n'
|
453 |
|
|
#header
|
454 |
|
|
printout = ''
|
455 |
|
|
printout += '%-25s'%'Process'
|
456 |
|
|
printout += ':'
|
457 |
|
|
for item, val in final_histos['nominal'].items():
|
458 |
peller |
1.70 |
printout += '%-12s'%item
|
459 |
peller |
1.69 |
print printout+'\n'
|
460 |
nmohr |
1.68 |
for key in final_histos:
|
461 |
|
|
printout = ''
|
462 |
peller |
1.69 |
printout += '%-25s'%key
|
463 |
|
|
printout += ':'
|
464 |
nmohr |
1.68 |
for item, val in final_histos[key].items():
|
465 |
peller |
1.74 |
printout += '%-12s'%str('%0.5f'%val.Integral())
|
466 |
nmohr |
1.68 |
print printout
|
467 |
peller |
1.1 |
|
468 |
nmohr |
1.68 |
#-----------------------------------------------------------------------------------------------------------
|
469 |
peller |
1.36 |
|
470 |
peller |
1.62 |
# -------------------- write DATAcard: ----------------------------------------------------------------------
|
471 |
|
|
DCprocessseparatordict = {'WS':':','TH':'/'}
|
472 |
nmohr |
1.68 |
# create two datacards: for TH an WS
|
473 |
peller |
1.62 |
for DCtype in ['WS','TH']:
|
474 |
|
|
columns=len(setup)
|
475 |
|
|
f = open(outpath+'vhbb_DC_%s_%s.txt'%(DCtype,ROOToutname),'w')
|
476 |
|
|
f.write('imax\t1\tnumber of channels\n')
|
477 |
|
|
f.write('jmax\t%s\tnumber of backgrounds (\'*\' = automatic)\n'%(columns-1))
|
478 |
|
|
f.write('kmax\t*\tnumber of nuisance parameters (sources of systematical uncertainties)\n\n')
|
479 |
|
|
f.write('shapes * * vhbb_%s_%s.root $CHANNEL%s$PROCESS $CHANNEL%s$PROCESS$SYSTEMATIC\n\n'%(DCtype,ROOToutname,DCprocessseparatordict[DCtype],DCprocessseparatordict[DCtype]))
|
480 |
|
|
f.write('bin\t%s\n\n'%Datacardbin)
|
481 |
nmohr |
1.82 |
if toy:
|
482 |
peller |
1.76 |
f.write('observation\t%s\n\n'%(hDummy.Integral()))
|
483 |
peller |
1.75 |
else:
|
484 |
peller |
1.76 |
f.write('observation\t%s\n\n'%(theData.Integral()))
|
485 |
nmohr |
1.68 |
# datacard bin
|
486 |
peller |
1.62 |
f.write('bin')
|
487 |
|
|
for c in range(0,columns): f.write('\t%s'%Datacardbin)
|
488 |
|
|
f.write('\n')
|
489 |
nmohr |
1.68 |
# datacard process
|
490 |
peller |
1.62 |
f.write('process')
|
491 |
|
|
for c in setup: f.write('\t%s'%Dict[c])
|
492 |
|
|
f.write('\n')
|
493 |
|
|
f.write('process')
|
494 |
|
|
for c in range(0,columns): f.write('\t%s'%c)
|
495 |
|
|
f.write('\n')
|
496 |
nmohr |
1.68 |
# datacard yields
|
497 |
peller |
1.62 |
f.write('rate')
|
498 |
nmohr |
1.68 |
for c in setup:
|
499 |
|
|
f.write('\t%s'%final_histos['nominal'][c].Integral())
|
500 |
peller |
1.29 |
f.write('\n')
|
501 |
nmohr |
1.68 |
# get list of systematics in use
|
502 |
peller |
1.62 |
InUse=eval(config.get('Datacard','InUse_%s'%pt_region))
|
503 |
nmohr |
1.68 |
# write non-shape systematics
|
504 |
peller |
1.62 |
for item in InUse:
|
505 |
|
|
f.write(item)
|
506 |
|
|
what=eval(config.get('Datacard',item))
|
507 |
|
|
f.write('\t%s'%what['type'])
|
508 |
|
|
for c in setup:
|
509 |
|
|
if c in what:
|
510 |
nmohr |
1.92 |
if '_eff_e' in item and 'Zmm' in data_sample_names: f.write('\t-')
|
511 |
|
|
elif '_eff_m' in item and 'Zee' in data_sample_names: f.write('\t-')
|
512 |
|
|
elif '_trigger_e' in item and 'Zmm' in data_sample_names: f.write('\t-')
|
513 |
|
|
elif '_trigger_m' in item and 'Zee' in data_sample_names: f.write('\t-')
|
514 |
peller |
1.35 |
else:
|
515 |
peller |
1.62 |
f.write('\t%s'%what[c])
|
516 |
peller |
1.35 |
else:
|
517 |
|
|
f.write('\t-')
|
518 |
|
|
f.write('\n')
|
519 |
peller |
1.77 |
if not ignore_stats:
|
520 |
nmohr |
1.68 |
# Write statistical shape variations
|
521 |
peller |
1.77 |
if binstat:
|
522 |
|
|
for c in setup:
|
523 |
|
|
for bin in range(0,nBins):
|
524 |
nmohr |
1.91 |
if bin in binsBelowThreshold[c]:
|
525 |
|
|
f.write('%s_bin%s_%s_%s\tshape'%(systematicsnaming['stats'],bin,Dict[c],Datacardbin))
|
526 |
|
|
for it in range(0,columns):
|
527 |
|
|
if it == setup.index(c):
|
528 |
|
|
f.write('\t1.0')
|
529 |
|
|
else:
|
530 |
|
|
f.write('\t-')
|
531 |
|
|
f.write('\n')
|
532 |
peller |
1.77 |
else:
|
533 |
|
|
for c in setup:
|
534 |
peller |
1.87 |
f.write('%s_%s_%s\tshape'%(systematicsnaming['stats'],Dict[c],Datacardbin))
|
535 |
peller |
1.62 |
for it in range(0,columns):
|
536 |
|
|
if it == setup.index(c):
|
537 |
|
|
f.write('\t1.0')
|
538 |
|
|
else:
|
539 |
|
|
f.write('\t-')
|
540 |
|
|
f.write('\n')
|
541 |
nmohr |
1.68 |
# UEPS systematics
|
542 |
nmohr |
1.84 |
for weightF_sys in weightF_systematics:
|
543 |
|
|
f.write('%s\tshape' %(systematicsnaming[weightF_sys]))
|
544 |
peller |
1.62 |
for it in range(0,columns): f.write('\t1.0')
|
545 |
|
|
f.write('\n')
|
546 |
nmohr |
1.68 |
# additional sample systematics
|
547 |
peller |
1.62 |
if addSample_sys:
|
548 |
|
|
alreadyAdded = []
|
549 |
|
|
for newSample in addSample_sys.iterkeys():
|
550 |
|
|
for c in setup:
|
551 |
peller |
1.75 |
if not c == GroupDict[newSample]: continue
|
552 |
peller |
1.62 |
if Dict[c] in alreadyAdded: continue
|
553 |
nmohr |
1.71 |
f.write('%s_%s\tshape'%(systematicsnaming['model'],Dict[c]))
|
554 |
peller |
1.62 |
for it in range(0,columns):
|
555 |
|
|
if it == setup.index(c):
|
556 |
|
|
f.write('\t1.0')
|
557 |
|
|
else:
|
558 |
|
|
f.write('\t-')
|
559 |
|
|
f.write('\n')
|
560 |
|
|
alreadyAdded.append(Dict[c])
|
561 |
nmohr |
1.68 |
# regular systematics
|
562 |
peller |
1.62 |
for sys in systematics:
|
563 |
|
|
sys_factor=sys_factor_dict[sys]
|
564 |
|
|
f.write('%s\tshape'%systematicsnaming[sys])
|
565 |
peller |
1.63 |
for c in setup:
|
566 |
|
|
if c in sys_affecting[sys]:
|
567 |
|
|
f.write('\t%s'%sys_factor)
|
568 |
|
|
else:
|
569 |
|
|
f.write('\t-')
|
570 |
peller |
1.62 |
f.write('\n')
|
571 |
|
|
f.close()
|
572 |
nmohr |
1.68 |
# --------------------------------------------------------------------------
|
573 |
|
|
|
574 |
|
|
|
575 |
|
|
|
576 |
|
|
|
577 |
nmohr |
1.31 |
outfile.Close()
|