1 |
|
#!/usr/bin/env python |
2 |
– |
import sys, os, string, re |
2 |
|
from DBSInfo import * |
3 |
|
|
4 |
|
|
5 |
|
# #################################### |
6 |
|
class DataDiscoveryError(exceptions.Exception):
    """Generic error raised when a DBS query fails during data discovery."""

    def __init__(self, errorMessage):
        # Store the message on args so str(exc) and getErrorMessage agree.
        self.args = errorMessage
        exceptions.Exception.__init__(self, self.args)

    def getErrorMessage(self):
        """ Return exception error """
        return "%s" % (self.args)
15 |
|
|
16 |
|
# #################################### |
17 |
|
class NotExistingDatasetError(exceptions.Exception):
    """Raised when DBS returns no data for the requested dataset path."""

    def __init__(self, errorMessage):
        # Store the message on args so str(exc) and getErrorMessage agree.
        self.args = errorMessage
        exceptions.Exception.__init__(self, self.args)

    def getErrorMessage(self):
        """ Return exception error """
        return "%s" % (self.args)
26 |
|
|
27 |
|
# #################################### |
28 |
|
class NoDataTierinProvenanceError(exceptions.Exception):
    """Raised when a requested data tier is absent from the dataset provenance."""

    def __init__(self, errorMessage):
        # Store the message on args so str(exc) and getErrorMessage agree.
        self.args = errorMessage
        exceptions.Exception.__init__(self, self.args)

    def getErrorMessage(self):
        """ Return exception error """
        return "%s" % (self.args)
37 |
|
|
38 |
|
# #################################### |
39 |
|
# class to find and extract info from published data
class DataDiscovery:
    """
    Find and extract information about a published dataset from DBS.

    Queries DBS for the dataset identified by datasetPath and caches the
    per-fileblock / per-file event counts for use by job splitting.
    """

    def __init__(self, datasetPath, cfg_params):
        """
        datasetPath -- DBS dataset path name, as given in crab.cfg.
        cfg_params  -- dict of crab.cfg parameters; optional keys
                       'CMSSW.dbs_url' and 'CMSSW.dbs_instance' override
                       the DBS endpoint used by fetchDBSInfo.
        """
        # Attributes
        self.datasetPath = datasetPath
        self.cfg_params = cfg_params

        self.eventsPerBlock = {}  # DBS output: map fileblocks-events for collection
        self.eventsPerFile = {}   # DBS output: map files-events
        self.blocksinfo = {}      # DBS output: map fileblocks-files
        # DBS output: max events computed by method getMaxEvents
|
# #################################### |
55 |
|
Contact DBS |
56 |
|
""" |
57 |
|
|
58 |
< |
## add the PU among the required data tiers if the Digi are requested |
59 |
< |
if (self.dataTiers.count('Digi')>0) & (self.dataTiers.count('PU')<=0) : |
60 |
< |
self.dataTiers.append('PU') |
58 |
> |
## get DBS URL |
59 |
> |
try: |
60 |
> |
dbs_url=self.cfg_params['CMSSW.dbs_url'] |
61 |
> |
except KeyError: |
62 |
> |
dbs_url="http://cmsdoc.cern.ch/cms/test/aprom/DBS/CGIServer/prodquery" |
63 |
|
|
64 |
< |
## get info about the requested dataset |
66 |
< |
dbs=DBSInfo() |
64 |
> |
## get info about the requested dataset |
65 |
|
try: |
66 |
< |
self.datasets = dbs.getMatchingDatasets(self.owner, self.dataset) |
67 |
< |
except DBSError, ex: |
68 |
< |
raise DataDiscoveryError(ex.getErrorMessage()) |
69 |
< |
if len(self.datasets) == 0: |
70 |
< |
raise DataDiscoveryError("Owner=%s, Dataset=%s unknown to DBS" % (self.owner, self.dataset)) |
73 |
< |
if len(self.datasets) > 1: |
74 |
< |
raise DataDiscoveryError("Owner=%s, Dataset=%s is ambiguous" % (self.owner, self.dataset)) |
66 |
> |
dbs_instance=self.cfg_params['CMSSW.dbs_instance'] |
67 |
> |
except KeyError: |
68 |
> |
dbs_instance="MCGlobal/Writer" |
69 |
> |
|
70 |
> |
dbs = DBSInfo(dbs_url, dbs_instance) |
71 |
|
try: |
72 |
< |
self.dbsdataset = self.datasets[0].get('datasetPathName') |
73 |
< |
self.blocksinfo = dbs.getDatasetContents(self.dbsdataset) |
74 |
< |
self.allblocks.append (self.blocksinfo.keys ()) # add also the current fileblocksinfo |
75 |
< |
self.dbspaths.append(self.dbsdataset) |
76 |
< |
except DBSError, ex: |
81 |
< |
raise DataDiscoveryError(ex.getErrorMessage()) |
82 |
< |
|
83 |
< |
if len(self.blocksinfo)<=0: |
84 |
< |
msg="\nERROR Data for %s do not exist in DBS! \n Check the dataset/owner variables in crab.cfg !"%self.dbsdataset |
85 |
< |
raise NotExistingDatasetError(msg) |
72 |
> |
self.datasets = dbs.getMatchingDatasets(self.datasetPath) |
73 |
> |
except DBS1API.dbsCgiApi.DbsCgiExecutionError, msg: |
74 |
> |
raise DataDiscoveryError(msg) |
75 |
> |
except DBSError, msg: |
76 |
> |
raise DataDiscoveryError(msg) |
77 |
|
|
78 |
+ |
if len(self.datasets) == 0: |
79 |
+ |
raise DataDiscoveryError("DatasetPath=%s unknown to DBS" %self.datasetPath) |
80 |
+ |
if len(self.datasets) > 1: |
81 |
+ |
raise DataDiscoveryError("DatasetPath=%s is ambiguous" %self.datasetPath) |
82 |
|
|
88 |
– |
## get info about the parents |
83 |
|
try: |
84 |
< |
parents=dbs.getDatasetProvenance(self.dbsdataset, self.dataTiers) |
91 |
< |
except DBSInvalidDataTierError, ex: |
92 |
< |
msg=ex.getErrorMessage()+' \n Check the data_tier variable in crab.cfg !\n' |
93 |
< |
raise DataDiscoveryError(msg) |
94 |
< |
except DBSError, ex: |
95 |
< |
raise DataDiscoveryError(ex.getErrorMessage()) |
84 |
> |
self.dbsdataset = self.datasets[0].get('datasetPathName') |
85 |
|
|
86 |
< |
## check that the user asks for parent Data Tier really existing in the DBS provenance |
87 |
< |
self.checkParentDataTier(parents, self.dataTiers) |
88 |
< |
|
100 |
< |
## for each parent get the corresponding fileblocks |
101 |
< |
try: |
102 |
< |
for p in parents: |
103 |
< |
## fill a list of dbspaths |
104 |
< |
parentPath = p.get('parent').get('datasetPathName') |
105 |
< |
self.dbspaths.append (parentPath) |
106 |
< |
parentBlocks = dbs.getDatasetContents (parentPath) |
107 |
< |
self.allblocks.append (parentBlocks.keys ()) # add parent fileblocksinfo |
86 |
> |
self.eventsPerBlock = dbs.getEventsPerBlock(self.dbsdataset) |
87 |
> |
self.blocksinfo = dbs.getDatasetFileBlocks(self.dbsdataset) |
88 |
> |
self.eventsPerFile = dbs.getEventsPerFile(self.dbsdataset) |
89 |
|
except DBSError, ex: |
90 |
|
raise DataDiscoveryError(ex.getErrorMessage()) |
91 |
< |
|
92 |
< |
# ################################################# |
93 |
< |
def checkParentDataTier(self, parents, dataTiers): |
94 |
< |
""" |
95 |
< |
check that the data tiers requested by the user really exists in the provenance of the given dataset |
115 |
< |
""" |
116 |
< |
startType = string.split(self.dbsdataset,'/')[2] |
117 |
< |
# for example 'type' is PU and 'dataTier' is Hit |
118 |
< |
parentTypes = map(lambda p: p.get('type'), parents) |
119 |
< |
for tier in dataTiers: |
120 |
< |
if parentTypes.count(tier) <= 0 and tier != startType: |
121 |
< |
msg="\nERROR Data %s not published in DBS with asked data tiers : the data tier not found is %s !\n Check the data_tier variable in crab.cfg !"%(self.dbsdataset,tier) |
122 |
< |
raise NoDataTierinProvenanceError(msg) |
91 |
> |
|
92 |
> |
if len(self.eventsPerBlock) <= 0: |
93 |
> |
raise NotExistingDatasetError (("\nNo data for %s in DBS\nPlease check" |
94 |
> |
+ " dataset path variables in crab.cfg") |
95 |
> |
% self.dbsdataset) |
96 |
|
|
97 |
|
|
98 |
|
# ################################################# |
99 |
|
def getMaxEvents(self): |
100 |
|
""" |
101 |
< |
max events of the primary dataset-owner |
101 |
> |
max events |
102 |
|
""" |
103 |
< |
## loop over the fileblocks of the primary dataset-owner |
103 |
> |
## loop over the event collections |
104 |
|
nevts=0 |
105 |
< |
for blockevts in self.blocksinfo.values(): |
106 |
< |
nevts=nevts+blockevts |
105 |
> |
for evc_evts in self.eventsPerBlock.values(): |
106 |
> |
nevts=nevts+evc_evts |
107 |
|
|
108 |
|
return nevts |
109 |
|
|
110 |
|
# ################################################# |
111 |
< |
def getDBSPaths(self): |
111 |
> |
def getEventsPerBlock(self): |
112 |
|
""" |
113 |
< |
list the DBSpaths for all required data |
113 |
> |
list the event collections structure by fileblock |
114 |
|
""" |
115 |
< |
return self.dbspaths |
115 |
> |
return self.eventsPerBlock |
116 |
|
|
117 |
|
# ################################################# |
118 |
< |
def getEVC(self): |
118 |
> |
def getEventsPerFile(self): |
119 |
|
""" |
120 |
< |
list the event collections structure by fileblock |
120 |
> |
list the event collections structure by file |
121 |
|
""" |
122 |
< |
print "To be used by a more complex job splitting... TODO later... " |
150 |
< |
print "it requires changes in what's returned by DBSInfo.getDatasetContents and then fetchDBSInfo" |
122 |
> |
return self.eventsPerFile |
123 |
|
|
124 |
|
# ################################################# |
125 |
< |
def getFileBlocks(self): |
125 |
> |
def getFiles(self): |
126 |
|
""" |
127 |
< |
fileblocks for all required dataset-owners |
127 |
> |
return files grouped by fileblock |
128 |
|
""" |
129 |
< |
return self.allblocks |
129 |
> |
return self.blocksinfo |
130 |
|
|
131 |
|
######################################################################## |
160 |
– |
|
161 |
– |
|