  3     import os
  4     import fileinput
  5     from array import *
  6  <  import argparse
  6  >  from optparse import OptionParser
  7     from OSUT3Analysis.Configuration.configurationOptions import *
  8     from OSUT3Analysis.Configuration.processingUtilities import *
  9
 10  <  parser = argparse.ArgumentParser()
 10  >  parser = OptionParser()
 11     parser = set_commandline_arguments(parser)
 12  <
 13  <  arguments = parser.parse_args()
 12  >  (arguments, args) = parser.parse_args()
 13
 14     if arguments.localConfig:
 15         sys.path.append(os.getcwd())
100
101     firstChannel = True
102
103  <  weight = intLumi / 10000.0
104  <  for dataset in processed_datasets:
105  <      dataset_file = "%s/%s.root" % (condor_dir,dataset)
106  <      if types[dataset] != "data":
107  <          os.system("mergeTFileServiceHistograms -i %s -o %s -w %g" % (dataset_file, dataset_file + "_tmp", weight))
108  <      else:
109  <          os.system("mergeTFileServiceHistograms -i %s -o %s -w %g" % (dataset_file, dataset_file + "_tmp", 1.0))
103  >  #weight = intLumi / 10000.0
104  >  #for dataset in processed_datasets:
105  >  #    dataset_file = "%s/%s.root" % (condor_dir,dataset)
106  >  #    fin = TFile (dataset_file)
107  >  #    flags = fin.Get ("flags")
108  >  #    noWeights = flags and flags.GetBinContent (1)
109  >  #    fin.Close ()
110  >  #    if types[dataset] != "data" and not noWeights:
111  >  #        os.system("mergeTFileServiceHistograms -i %s -o %s -w %g" % (dataset_file, dataset_file + "_tmp", weight))
112  >  #    else:
113  >  #        os.system("mergeTFileServiceHistograms -i %s -o %s -w %g" % (dataset_file, dataset_file + "_tmp", 1.0))
114
115     for channel in channels: # loop over final states, which each have their own directory
116         formatted_channel = channel.replace("_"," ")
119         minusOneArgs = ""
120         #print hist
121         for dataset in processed_datasets:
122  <          dataset_file = "%s/%s.root_tmp" % (condor_dir,dataset)
122  >          dataset_file = "%s/%s.root" % (condor_dir,dataset)
123             #print dataset_file
124             cutFlowArgs = cutFlowArgs + " " + dataset_file
125             selectionArgs = selectionArgs + " " + dataset_file
159         fout.close ()
160         os.system("cutFlowTable -l %g %s >> %s" % (intLumi,minusOneArgs,texfile))
161
162  <      for dataset in processed_datasets:
163  <          dataset_file = "%s/%s.root_tmp" % (condor_dir,dataset)
164  <          os.remove(dataset_file)
162  >      #for dataset in processed_datasets:
163  >      #    dataset_file = "%s/%s.root_tmp" % (condor_dir,dataset)
164  >      #    os.remove(dataset_file)
165
166         #reformat tex files
167         for line in fileinput.FileInput(texfile,inplace=1):