1 |
|
#!/usr/bin/env python |
2 |
< |
import sys, os, string, re |
3 |
< |
from DBSInfo import * |
2 |
> |
|
3 |
> |
__revision__ = "$Id$" |
4 |
> |
__version__ = "$Revision$" |
5 |
> |
|
6 |
> |
import exceptions |
7 |
> |
import DBSAPI.dbsApi |
8 |
> |
from DBSAPI.dbsApiException import * |
9 |
> |
import common |
10 |
> |
from crab_util import * |
11 |
> |
from LumiList import LumiList |
12 |
> |
import os |
13 |
> |
|
14 |
> |
|
15 |
> |
|
16 |
> |
class DBSError(exceptions.Exception):
    """Generic DBS failure carrying the DBS error name and message."""

    def __init__(self, errorName, errorMessage):
        # Format the message once, then let the base class store it in self.args.
        text = '\nERROR DBS %s : %s \n' % (errorName, errorMessage)
        exceptions.Exception.__init__(self, text)

    def getErrorMessage(self):
        """ Return error message """
        return "%s" % (self.args)
25 |
> |
|
26 |
> |
|
27 |
> |
|
28 |
> |
class DBSInvalidDataTierError(exceptions.Exception):
    """DBS rejected the requested data tier."""

    def __init__(self, errorName, errorMessage):
        # Same formatting convention as DBSError; base class keeps it in self.args.
        text = '\nERROR DBS %s : %s \n' % (errorName, errorMessage)
        exceptions.Exception.__init__(self, text)

    def getErrorMessage(self):
        """ Return error message """
        return "%s" % (self.args)
37 |
> |
|
38 |
> |
|
39 |
> |
|
40 |
> |
class DBSInfoError: |
41 |
> |
def __init__(self, url): |
42 |
> |
print '\nERROR accessing DBS url : '+url+'\n' |
43 |
> |
pass |
44 |
> |
|
45 |
|
|
46 |
|
|
6 |
– |
# #################################### |
47 |
|
class DataDiscoveryError(exceptions.Exception):
    """Raised when querying DBS for a dataset fails."""

    def __init__(self, errorMessage):
        # Assign self.args first (pre-existing convention in this module),
        # then initialise the base class with the same value.
        self.args = errorMessage
        exceptions.Exception.__init__(self, self.args)

    def getErrorMessage(self):
        """ Return exception error """
        return "%s" % (self.args)
56 |
|
|
57 |
< |
# #################################### |
57 |
> |
|
58 |
> |
|
59 |
|
class NotExistingDatasetError(exceptions.Exception):
    """Raised when DBS has no data for the requested dataset path."""

    def __init__(self, errorMessage):
        # Keep the self.args-then-base-init order used throughout this module.
        self.args = errorMessage
        exceptions.Exception.__init__(self, self.args)

    def getErrorMessage(self):
        """ Return exception error """
        return "%s" % (self.args)
68 |
|
|
69 |
< |
# #################################### |
69 |
> |
|
70 |
> |
|
71 |
|
class NoDataTierinProvenanceError(exceptions.Exception):
    """Raised when a requested data tier is absent from the dataset provenance."""

    def __init__(self, errorMessage):
        # Keep the self.args-then-base-init order used throughout this module.
        self.args = errorMessage
        exceptions.Exception.__init__(self, self.args)

    def getErrorMessage(self):
        """ Return exception error """
        return "%s" % (self.args)
80 |
|
|
39 |
– |
class DataDiscovery:
    """
    Class to find and extract info from published data.

    Queries DBS for the files of a dataset and exposes per-block and
    per-file maps of events, lumis and parentage (filled by fetchDBSInfo).
    """
    def __init__(self, datasetPath, cfg_params, skipAnBlocks):

        # Attributes
        # datasetPath: DBS dataset path string, e.g. /primary/processed/tier
        self.datasetPath = datasetPath
        # Analysis dataset is primary/processed/tier/definition
        # (4 slashes -> 5 split parts, hence the "> 4" test)
        self.ads = len(self.datasetPath.split("/")) > 4
        # cfg_params: crab.cfg parameter dict-like object (assumed to support
        # .get()/.has_key() — TODO confirm against caller)
        self.cfg_params = cfg_params
        # skipAnBlocks: if true, fileblocks already analyzed are skipped
        self.skipBlocks = skipAnBlocks

        self.eventsPerBlock = {}  # DBS output: map fileblocks-events for collection
        self.eventsPerFile = {}   # DBS output: map files-events
#        self.lumisPerBlock = {}  # DBS output: number of lumis in each block
#        self.lumisPerFile = {}   # DBS output: number of lumis in each file
        self.blocksinfo = {}      # DBS output: map fileblocks-files
        self.maxEvents = 0        # DBS output: max events
        self.maxLumis = 0         # DBS output: total number of lumis
        self.parent = {}          # DBS output: parents of each file
        self.lumis = {}           # DBS output: lumis in each file
        self.lumiMask = None      # set later from CMSSW.lumi_mask by fetchDBSInfo
        self.splitByLumi = False  # set later by fetchDBSInfo
107 |
|
|
55 |
– |
# #################################### |
108 |
|
    def fetchDBSInfo(self):
        """
        Contact DBS and fill the bookkeeping maps for self.datasetPath:
        eventsPerBlock, eventsPerFile, blocksinfo, parent, lumis,
        maxEvents, maxLumis.

        Raises CrabException on invalid splitting settings or when no new
        fileblocks are available; NotExistingDatasetError when DBS returns
        no data for the dataset path.
        """
        ## get DBS URL (global production instance unless overridden in crab.cfg)
        global_url="http://cmsdbsprod.cern.ch/cms_dbs_prod_global/servlet/DBSServlet"
        dbs_url= self.cfg_params.get('CMSSW.dbs_url', global_url)
        common.logger.info("Accessing DBS at: "+dbs_url)

        ## check if runs are selected
        runselection = []
        if (self.cfg_params.has_key('CMSSW.runselection')):
            runselection = parseRange2(self.cfg_params['CMSSW.runselection'])

        ## check if various lumi parameters are set
        self.lumiMask = self.cfg_params.get('CMSSW.lumi_mask',None)
        self.lumiParams = self.cfg_params.get('CMSSW.total_number_of_lumis',None) or \
                          self.cfg_params.get('CMSSW.lumis_per_job',None)

        # Only built when needed; they are used below under the same guards,
        # so no NameError can occur despite the conditional binding.
        lumiList = None
        if self.lumiMask:
            lumiList = LumiList(filename=self.lumiMask)
        if runselection:
            runList = LumiList(runs = runselection)

        self.splitByRun = int(self.cfg_params.get('CMSSW.split_by_run', 0))
        common.logger.log(10-1,"runselection is: %s"%runselection)

        # Lumi-based splitting applies when any lumi selection is requested,
        # or for an analysis dataset — but never together with run splitting.
        if not self.splitByRun:
            self.splitByLumi = self.lumiMask or self.lumiParams or self.ads

        ## service API
        args = {}
        args['url']     = dbs_url
        args['level']   = 'CRITICAL'

        ## check if has been requested to use the parent info
        useparent = int(self.cfg_params.get('CMSSW.use_parent',0))

        ## check if has been asked for a non default file to store/read analyzed fileBlocks
        defaultName = common.work_space.shareDir()+'AnalyzedBlocks.txt'
        fileBlocks_FileName = os.path.abspath(self.cfg_params.get('CMSSW.fileblocks_file',defaultName))

        api = DBSAPI.dbsApi.DbsApi(args)
        self.files = self.queryDbs(api,path=self.datasetPath,runselection=runselection,useParent=useparent)

        # Check to see what the dataset is: real data must be split by run
        # or by lumi, never by events.
        pdsName = self.datasetPath.split("/")[1]
        primDSs = api.listPrimaryDatasets(pdsName)
        dataType = primDSs[0]['Type']
        common.logger.debug("Datatype is %s" % dataType)
        if dataType == 'data' and not (self.splitByRun or self.splitByLumi):
            msg = 'Data must be split by lumi or by run. ' \
                  'Please see crab -help for the correct settings'
            raise CrabException(msg)

        # Fileblocks already analyzed in a previous pass (best effort).
        anFileBlocks = []
        if self.skipBlocks: anFileBlocks = readTXTfile(self, fileBlocks_FileName)

        # parse files and fill arrays
        # NOTE(review): 'file' shadows the builtin; kept as-is for fidelity.
        for file in self.files :
            parList  = []
            fileLumis = [] # List of tuples
            # skip already analyzed blocks
            fileblock = file['Block']['Name']
            if fileblock not in anFileBlocks :
                filename = file['LogicalFileName']
                # asked retry the list of parent for the given child
                if useparent==1:
                    parList = [x['LogicalFileName'] for x in file['ParentList']]
                if self.splitByLumi:
                    fileLumis = [ (x['RunNumber'], x['LumiSectionNumber'])
                                 for x in file['LumiList'] ]
                self.parent[filename] = parList
                # For LumiMask, intersection of two lists.
                if self.lumiMask and runselection:
                    self.lumis[filename] = runList.filterLumis(lumiList.filterLumis(fileLumis))
                elif runselection:
                    self.lumis[filename] = runList.filterLumis(fileLumis)
                elif self.lumiMask:
                    self.lumis[filename] = lumiList.filterLumis(fileLumis)
                else:
                    self.lumis[filename] = fileLumis
                # '.dat' files are skipped from the event/lumi accounting
                # (presumably non-event payloads — TODO confirm).
                if filename.find('.dat') < 0 :
                    events = file['NumberOfEvents']
                    # Count number of events and lumis per block
                    if fileblock in self.eventsPerBlock.keys() :
                        self.eventsPerBlock[fileblock] += events
                    else :
                        self.eventsPerBlock[fileblock] = events
                    # Number of events per file
                    self.eventsPerFile[filename] = events

                    # List of files per block
                    if fileblock in self.blocksinfo.keys() :
                        self.blocksinfo[fileblock].append(filename)
                    else :
                        self.blocksinfo[fileblock] = [filename]

                    # total number of events
                    self.maxEvents += events
                    self.maxLumis += len(self.lumis[filename])

        if  self.skipBlocks and len(self.eventsPerBlock.keys()) == 0:
            msg = "No new fileblocks available for dataset: "+str(self.datasetPath)
            raise CrabException(msg)

        if len(self.eventsPerBlock) <= 0:
            raise NotExistingDatasetError(("\nNo data for %s in DBS\nPlease check"
                                            + " dataset path variables in crab.cfg")
                                            % self.datasetPath)
222 |
+ |
|
223 |
+ |
|
224 |
+ |
def queryDbs(self,api,path=None,runselection=None,useParent=None): |
225 |
+ |
|
226 |
+ |
allowedRetriveValue = ['retrive_block', 'retrive_run'] |
227 |
+ |
if self.ads or self.lumiMask or self.lumiParams: |
228 |
+ |
allowedRetriveValue.append('retrive_lumi') |
229 |
+ |
if useParent == 1: allowedRetriveValue.append('retrive_parent') |
230 |
+ |
common.logger.debug("Set of input parameters used for DBS query: %s" % allowedRetriveValue) |
231 |
+ |
try: |
232 |
+ |
if len(runselection) <=0 or self.splitByLumi: |
233 |
+ |
if useParent==1 or self.splitByRun==1 or self.splitByLumi: |
234 |
+ |
if self.ads: |
235 |
+ |
files = api.listFiles(analysisDataset=path, retriveList=allowedRetriveValue) |
236 |
+ |
else : |
237 |
+ |
files = api.listFiles(path=path, retriveList=allowedRetriveValue) |
238 |
+ |
else: |
239 |
+ |
files = api.listDatasetFiles(self.datasetPath) |
240 |
+ |
else : |
241 |
+ |
files=[] |
242 |
+ |
for arun in runselection: |
243 |
+ |
try: |
244 |
+ |
if self.ads: |
245 |
+ |
filesinrun = api.listFiles(analysisDataset=path,retriveList=allowedRetriveValue,runNumber=arun) |
246 |
+ |
else: |
247 |
+ |
filesinrun = api.listFiles(path=path,retriveList=allowedRetriveValue,runNumber=arun) |
248 |
+ |
files.extend(filesinrun) |
249 |
+ |
except: |
250 |
+ |
msg="WARNING: problem extracting info from DBS for run %s "%arun |
251 |
+ |
common.logger.info(msg) |
252 |
+ |
pass |
253 |
|
|
254 |
< |
## add the PU among the required data tiers if the Digi are requested |
255 |
< |
if (self.dataTiers.count('Digi')>0) & (self.dataTiers.count('PU')<=0) : |
256 |
< |
self.dataTiers.append('PU') |
254 |
> |
except DbsBadRequest, msg: |
255 |
> |
raise DataDiscoveryError(msg) |
256 |
> |
except DBSError, msg: |
257 |
> |
raise DataDiscoveryError(msg) |
258 |
|
|
259 |
< |
## get info about the requested dataset |
66 |
< |
dbs=DBSInfo() |
67 |
< |
try: |
68 |
< |
self.datasets = dbs.getMatchingDatasets(self.owner, self.dataset) |
69 |
< |
except DBSError, ex: |
70 |
< |
raise DataDiscoveryError(ex.getErrorMessage()) |
71 |
< |
if len(self.datasets) == 0: |
72 |
< |
raise DataDiscoveryError("Owner=%s, Dataset=%s unknown to DBS" % (self.owner, self.dataset)) |
73 |
< |
if len(self.datasets) > 1: |
74 |
< |
raise DataDiscoveryError("Owner=%s, Dataset=%s is ambiguous" % (self.owner, self.dataset)) |
75 |
< |
try: |
76 |
< |
self.dbsdataset = self.datasets[0].get('datasetPathName') |
77 |
< |
self.blocksinfo = dbs.getDatasetContents(self.dbsdataset) |
78 |
< |
self.allblocks.append (self.blocksinfo.keys ()) # add also the current fileblocksinfo |
79 |
< |
self.dbspaths.append(self.dbsdataset) |
80 |
< |
except DBSError, ex: |
81 |
< |
raise DataDiscoveryError(ex.getErrorMessage()) |
82 |
< |
|
83 |
< |
if len(self.blocksinfo)<=0: |
84 |
< |
msg="\nERROR Data for %s do not exist in DBS! \n Check the dataset/owner variables in crab.cfg !"%self.dbsdataset |
85 |
< |
raise NotExistingDatasetError(msg) |
259 |
> |
return files |
260 |
|
|
261 |
|
|
262 |
< |
## get info about the parents |
263 |
< |
try: |
264 |
< |
parents=dbs.getDatasetProvenance(self.dbsdataset, self.dataTiers) |
265 |
< |
except DBSInvalidDataTierError, ex: |
266 |
< |
msg=ex.getErrorMessage()+' \n Check the data_tier variable in crab.cfg !\n' |
93 |
< |
raise DataDiscoveryError(msg) |
94 |
< |
except DBSError, ex: |
95 |
< |
raise DataDiscoveryError(ex.getErrorMessage()) |
262 |
> |
def getMaxEvents(self): |
263 |
> |
""" |
264 |
> |
max events |
265 |
> |
""" |
266 |
> |
return self.maxEvents |
267 |
|
|
97 |
– |
## check that the user asks for parent Data Tier really existing in the DBS provenance |
98 |
– |
self.checkParentDataTier(parents, self.dataTiers) |
268 |
|
|
269 |
< |
## for each parent get the corresponding fileblocks |
270 |
< |
try: |
271 |
< |
for p in parents: |
272 |
< |
## fill a list of dbspaths |
273 |
< |
parentPath = p.get('parent').get('datasetPathName') |
105 |
< |
self.dbspaths.append (parentPath) |
106 |
< |
parentBlocks = dbs.getDatasetContents (parentPath) |
107 |
< |
self.allblocks.append (parentBlocks.keys ()) # add parent fileblocksinfo |
108 |
< |
except DBSError, ex: |
109 |
< |
raise DataDiscoveryError(ex.getErrorMessage()) |
110 |
< |
|
111 |
< |
# ################################################# |
112 |
< |
def checkParentDataTier(self, parents, dataTiers): |
113 |
< |
""" |
114 |
< |
check that the data tiers requested by the user really exists in the provenance of the given dataset |
115 |
< |
""" |
116 |
< |
startType = string.split(self.dbsdataset,'/')[2] |
117 |
< |
# for example 'type' is PU and 'dataTier' is Hit |
118 |
< |
parentTypes = map(lambda p: p.get('type'), parents) |
119 |
< |
for tier in dataTiers: |
120 |
< |
if parentTypes.count(tier) <= 0 and tier != startType: |
121 |
< |
msg="\nERROR Data %s not published in DBS with asked data tiers : the data tier not found is %s !\n Check the data_tier variable in crab.cfg !"%(self.dbsdataset,tier) |
122 |
< |
raise NoDataTierinProvenanceError(msg) |
269 |
> |
def getMaxLumis(self): |
270 |
> |
""" |
271 |
> |
Return the number of lumis in the dataset |
272 |
> |
""" |
273 |
> |
return self.maxLumis |
274 |
|
|
275 |
|
|
276 |
< |
# ################################################# |
277 |
< |
def getMaxEvents(self): |
276 |
> |
def getEventsPerBlock(self): |
277 |
> |
""" |
278 |
> |
list the event collections structure by fileblock |
279 |
> |
""" |
280 |
> |
return self.eventsPerBlock |
281 |
> |
|
282 |
> |
|
283 |
> |
def getEventsPerFile(self): |
284 |
|
""" |
285 |
< |
max events of the primary dataset-owner |
285 |
> |
list the event collections structure by file |
286 |
|
""" |
287 |
< |
## loop over the fileblocks of the primary dataset-owner |
131 |
< |
nevts=0 |
132 |
< |
for blockevts in self.blocksinfo.values(): |
133 |
< |
nevts=nevts+blockevts |
287 |
> |
return self.eventsPerFile |
288 |
|
|
135 |
– |
return nevts |
289 |
|
|
290 |
< |
# ################################################# |
138 |
< |
def getDBSPaths(self): |
290 |
> |
def getFiles(self): |
291 |
|
""" |
292 |
< |
list the DBSpaths for all required data |
292 |
> |
return files grouped by fileblock |
293 |
|
""" |
294 |
< |
return self.dbspaths |
294 |
> |
return self.blocksinfo |
295 |
|
|
296 |
< |
# ################################################# |
297 |
< |
def getEVC(self): |
296 |
> |
|
297 |
> |
def getParent(self): |
298 |
|
""" |
299 |
< |
list the event collections structure by fileblock |
299 |
> |
return parent grouped by file |
300 |
|
""" |
301 |
< |
print "To be used by a more complex job splitting... TODO later... " |
302 |
< |
print "it requires changes in what's returned by DBSInfo.getDatasetContents and then fetchDBSInfo" |
301 |
> |
return self.parent |
302 |
> |
|
303 |
|
|
304 |
< |
# ################################################# |
153 |
< |
def getFileBlocks(self): |
304 |
> |
def getLumis(self): |
305 |
|
""" |
306 |
< |
fileblocks for all required dataset-owners |
306 |
> |
return lumi sections grouped by file |
307 |
|
""" |
308 |
< |
return self.allblocks |
308 |
> |
return self.lumis |
309 |
|
|
310 |
< |
######################################################################## |
310 |
> |
|
311 |
> |
def getListFiles(self): |
312 |
> |
""" |
313 |
> |
return parent grouped by file |
314 |
> |
""" |
315 |
> |
return self.files |