ViewVC Help
View File | Revision Log | Show Annotations | Root Listing
root/cvsroot/UserCode/OSUT3Analysis/Configuration/scripts/mergeOutput.py
Revision: 1.19
Committed: Sat Jun 1 21:14:22 2013 UTC (11 years, 11 months ago) by ahart
Content type: text/x-python
Branch: MAIN
CVS Tags: V02-03-01, V02-03-00
Changes since 1.18: +28 -5 lines
Log Message:
Added a "-j" option to control the number of simultaneous processes used, similar to the "-j" option of "scram build". The default behavior is to use as many processes as there are CPU cores.

File Contents

# Content
1 #!/usr/bin/env python
2 import os
3 import sys
4 import fcntl
5 from optparse import OptionParser
6 from multiprocessing import Process, Queue, cpu_count
7
8 from OSUT3Analysis.Configuration.configurationOptions import *
9 from OSUT3Analysis.Configuration.processingUtilities import *
10
parser = OptionParser()
parser = set_commandline_arguments(parser)
parser.add_option("-j", "--nJobs", dest="nJobs", default=cpu_count(),
                  help="Set the number of processes to run simultaneously (default: number of CPUs)")
(arguments, args) = parser.parse_args()

if arguments.localConfig:
    sys.path.append(os.getcwd())
    # BUG FIX: str.rstrip('.py') strips any trailing '.', 'p', or 'y'
    # characters, not the ".py" suffix — a config named e.g. "myCopy.py"
    # would be mangled to "myCo".  Strip the suffix explicitly instead.
    config_module = arguments.localConfig
    if config_module.endswith(".py"):
        config_module = config_module[:-len(".py")]
    exec("from " + config_module + " import *")
20
# ROOT classes used to open the merged files and tag them with a flag
# histogram.
from ROOT import TFile, TH1D

# Condor output directory whose per-job files are to be merged
# (resolved by the project helper from the command-line arguments).
condor_dir = set_condor_output_dir(arguments)

#save a list of composite datasets
composite_datasets = get_composite_datasets(datasets, composite_dataset_definitions)
#save a list of datasets with composite datasets split up
split_datasets = split_composite_datasets(datasets, composite_dataset_definitions)


#merge first layer
# One-bin bookkeeping histogram written into every merged ROOT file:
# the "noWeights" bin is set to 1 when merging was requested without
# event weights (bin content stays 0 otherwise).
flags = TH1D ("flags", "", 1, 0, 1)
flags.GetXaxis ().SetBinLabel (1, "noWeights")
if arguments.noWeights:
    flags.SetBinContent (1, 1)
36
def mergeDataset (dataset, q):
    """Merge the condor job output for a single dataset.

    Runs the external mergeHists tool over the dataset's directory under
    condor_dir (with "-w 1" when arguments.noWeights is set, otherwise
    normalizing to intLumi), replays the tool's log to stdout under an
    exclusive lock so concurrent workers do not interleave their output,
    and appends the module-level ``flags`` histogram to the merged file.

    Parameters:
        dataset -- name of the dataset directory under condor_dir
        q       -- multiprocessing.Queue; a "done" token is put on it
                   when this worker finishes, for the parent's throttling

    Runs inside a child multiprocessing.Process.
    """
    dataset_dir = "%s/%s" % (condor_dir, dataset)
    if arguments.noWeights:
        command = "mergeHists -w 1 -p %s %s" % (dataset_dir, dataset_dir)
    else:
        command = "mergeHists -l %g -p %s %s" % (intLumi, dataset_dir, dataset_dir)
    # Append the tool's stdout+stderr to a per-dataset log file so it can
    # be replayed atomically below.
    command += " >> " + dataset_dir + ".out 2>&1"

    # Seed the log with a banner line; close on all paths.
    output = open (dataset_dir + ".out", "w")
    try:
        output.write ("Merging output for " + dataset + " dataset\n")
    finally:
        output.close ()

    os.system(command)

    # BUG FIX: the original never released the stdout lock (or closed the
    # log file) if reading/unlinking raised, which would deadlock every
    # sibling worker waiting on the lock.  try/finally guarantees release.
    fcntl.lockf (sys.stdout, fcntl.LOCK_EX)
    try:
        output = open (dataset_dir + ".out", "r")
        try:
            sys.stdout.write (output.read ())
        finally:
            output.close ()
        os.unlink (dataset_dir + ".out")
    finally:
        fcntl.lockf (sys.stdout, fcntl.LOCK_UN)

    # Record in the merged file whether weights were applied.
    fout = TFile (dataset_dir + ".root", "update")
    fout.cd ()
    flags.Write ()
    fout.Close ()

    q.put ("done")
64
# Launch one worker per (split) dataset, but never let more than nJobs
# run at once: once the cap is hit, wait for a "done" token before
# starting the next worker.
processes = []
q = Queue ()
runningProcesses = 0
for dataset in split_datasets:
    worker = Process (target = mergeDataset, args = (dataset, q))
    worker.start ()
    processes.append (worker)
    runningProcesses += 1
    if runningProcesses == int (arguments.nJobs):
        q.get ()
        runningProcesses -= 1
# Wait for every worker to exit, then drain the remaining tokens so the
# queue is empty for the next merging stage.
for worker in processes:
    worker.join ()
while runningProcesses:
    q.get ()
    runningProcesses -= 1
81
82 #merge together components of composite datasets
#merge together components of composite datasets
def mergeCompositeDataset (composite_dataset, q):
    """Merge the already-merged component files of one composite dataset.

    Collects the per-component .root files (produced by the first merge
    layer) that actually exist on disk, combines them with mergeHists
    using per-component weights when the composite definition is a dict
    (weight 1 otherwise), replays the log to stdout under an exclusive
    lock, and appends the module-level ``flags`` histogram to the result.

    Parameters:
        composite_dataset -- key into composite_dataset_definitions
        q                 -- multiprocessing.Queue; receives "done" when
                             this worker finishes

    Runs inside a child multiprocessing.Process.
    """
    definition = composite_dataset_definitions[composite_dataset]
    composite_dataset_dir = "%s/%s" % (condor_dir, composite_dataset)

    # Build the file and weight lists in parallel; components whose merged
    # file was never produced are skipped (weights stay aligned with files).
    component_files = []
    component_weights = []
    for component_dataset in definition:
        component_dataset_file_path = "%s/%s.root" % (condor_dir, component_dataset)
        if not os.path.isfile(component_dataset_file_path):
            continue
        component_files.append(component_dataset_file_path)
        if isinstance (definition, dict):
            component_weights.append(str (definition[component_dataset]))
        else:
            component_weights.append("1")

    command = "mergeHists -w %s -p %s %s" % (",".join (component_weights),
                                             composite_dataset_dir,
                                             " ".join (component_files))
    # Append the tool's stdout+stderr to a per-dataset log file.
    command += " >> " + composite_dataset_dir + ".out 2>&1"

    # Seed the log with a banner line; close on all paths.
    output = open (composite_dataset_dir + ".out", "w")
    try:
        output.write ("Merging component datasets for " + composite_dataset + " dataset\n")
    finally:
        output.close ()

    os.system(command)

    # BUG FIX: release the stdout lock (and close the log) even if the
    # replay raises; the original would deadlock the other workers.
    fcntl.lockf (sys.stdout, fcntl.LOCK_EX)
    try:
        output = open (composite_dataset_dir + ".out", "r")
        try:
            sys.stdout.write (output.read ())
        finally:
            output.close ()
        os.unlink (composite_dataset_dir + ".out")
    finally:
        fcntl.lockf (sys.stdout, fcntl.LOCK_UN)

    # Record in the merged file whether weights were applied.
    fout = TFile (composite_dataset_dir + ".root", "update")
    fout.cd ()
    flags.Write ()
    fout.Close ()

    q.put ("done")
125
# Second merging stage: one worker per composite dataset, reusing the
# queue from the first stage and the same nJobs throttle.
processes = []
runningProcesses = 0  # drained to zero by the first stage
for composite_dataset in composite_datasets:
    worker = Process (target = mergeCompositeDataset, args = (composite_dataset, q))
    worker.start ()
    processes.append (worker)
    runningProcesses += 1
    if runningProcesses == int (arguments.nJobs):
        q.get ()
        runningProcesses -= 1
# Wait for all workers, then drain the leftover completion tokens.
for worker in processes:
    worker.join ()
while runningProcesses:
    q.get ()
    runningProcesses -= 1
141
142 #recreate plots file with all datasets combined and in pretty colors
143 args = "-c %s" % condor_dir.partition('/')[2]
144
145 #pass all the options on to the plot and cutflow making scripts
146 if arguments.localConfig:
147 args = args + " -l " + arguments.localConfig
148 if arguments.normalizeToData:
149 args = args + " -n "
150 if arguments.normalizeToUnitArea:
151 args = args + " -u "
152 if arguments.noStack:
153 args = args + " -e "
154 if arguments.makeRatioPlots:
155 args = args + " -r "
156 if arguments.draw2DPlots:
157 args = args + " --2D "
158 if arguments.savePDFs:
159 args = args + " -p "
160 if arguments.outputFileName:
161 args = args + " -o " + arguments.outputFileName
162 if arguments.rebinFactor:
163 args = args + " -b " + arguments.rebinFactor
164
165 if not arguments.quickMerge:
166 print "running makePlots.py"
167 os.system("makePlots.py %s" % args)
168 print "running makeCutFlows.py"
169 os.system("makeCutFlows.py %s" % args)
170