1 |
< |
#!/usr/bin/env python2 |
2 |
< |
import sys, os, string, re |
1 |
> |
#!/usr/bin/env python |
2 |
|
from DBSInfo import * |
3 |
|
|
4 |
+ |
|
5 |
|
# ####################################
class DataDiscoveryError(Exception):
    """
    Generic error raised while contacting Data Discovery (DBS).

    errorMessage -- text (or object) describing the failure; stored in
                    the standard Exception .args tuple.
    """
    def __init__(self, errorMessage):
        # Let the Exception base class store the message in .args as a
        # one-element tuple.  Assigning a bare string to self.args (as the
        # old code did) is unreliable: Python 3 tuple-converts it character
        # by character, and the base __init__ overwrote it anyway.
        Exception.__init__(self, errorMessage)

    def getErrorMessage(self):
        """ Return exception error """
        # .args is (errorMessage,), so "%s" formatting yields the message.
        return "%s" % (self.args)
# ####################################
class NotExistingDatasetError(Exception):
    """
    Raised when the requested dataset path has no data in DBS.

    errorMessage -- text describing which dataset path was not found;
                    stored in the standard Exception .args tuple.
    """
    def __init__(self, errorMessage):
        # Delegate message storage to the Exception base class; the old
        # direct self.args assignment was redundant and unsafe in Python 3.
        Exception.__init__(self, errorMessage)

    def getErrorMessage(self):
        """ Return exception error """
        # .args is (errorMessage,), so "%s" formatting yields the message.
        return "%s" % (self.args)
# ####################################
class NoDataTierinProvenanceError(Exception):
    """
    Raised when the requested data tier is not found in the dataset
    provenance returned by DBS.

    errorMessage -- text describing the missing data tier; stored in the
                    standard Exception .args tuple.
    """
    def __init__(self, errorMessage):
        # Delegate message storage to the Exception base class; the old
        # direct self.args assignment was redundant and unsafe in Python 3.
        Exception.__init__(self, errorMessage)

    def getErrorMessage(self):
        """ Return exception error """
        # .args is (errorMessage,), so "%s" formatting yields the message.
        return "%s" % (self.args)
# ####################################

# class to find and extract info from published data
class DataDiscovery:
    """
    Query the CMS Dataset Bookkeeping Service (DBS) for a dataset path
    and cache the fileblock / file / event bookkeeping it returns.

    Typical use: construct with the dataset path and the CRAB config
    dict, call fetchDBSInfo() once, then read the results through the
    accessor methods below.
    """

    def __init__(self, datasetPath, cfg_params):
        """
        datasetPath -- DBS dataset path to look up.
        cfg_params  -- dict of configuration parameters; the optional keys
                       'CMSSW.dbs_url' and 'CMSSW.dbs_instance' override
                       the DBS contact point used by fetchDBSInfo().
        """
        # Attributes
        self.datasetPath = datasetPath
        self.cfg_params = cfg_params

        self.eventsPerBlock = {}  # DBS output: map fileblocks-events for collection
        self.eventsPerFile = {}   # DBS output: map files-events
        self.blocksinfo = {}      # DBS output: map fileblocks-files
        # DBS output: max events computed by method getMaxEvents

    # ####################################
    # NOTE(review): reconstructed from a corrupted diff; the method header
    # below was omitted from the diff but is implied by the surrounding
    # context lines -- confirm against the original file.
    def fetchDBSInfo(self):
        """
        Contact DBS

        Queries DBS for the configured dataset path and fills
        self.datasets, self.dbsdataset, self.eventsPerBlock,
        self.blocksinfo and self.eventsPerFile.

        Raises DataDiscoveryError on any DBS failure, on an unknown
        dataset path, or on an ambiguous one; NotExistingDatasetError
        when DBS returns no data for the dataset.
        """
        ## get DBS URL; fall back to the public CGI server if not configured
        try:
            dbs_url = self.cfg_params['CMSSW.dbs_url']
        except KeyError:
            dbs_url = "http://cmsdoc.cern.ch/cms/test/aprom/DBS/CGIServer/prodquery"

        ## get info about the requested dataset
        try:
            dbs_instance = self.cfg_params['CMSSW.dbs_instance']
        except KeyError:
            dbs_instance = "MCGlobal/Writer"

        dbs = DBSInfo(dbs_url, dbs_instance)
        try:
            self.datasets = dbs.getMatchingDatasets(self.datasetPath)
        except DBS1API.dbsCgiApi.DbsCgiExecutionError as msg:
            raise DataDiscoveryError(msg)
        except DBSError as msg:
            raise DataDiscoveryError(msg)

        # exactly one dataset must match the requested path
        if len(self.datasets) == 0:
            raise DataDiscoveryError("DatasetPath=%s unknown to DBS" % self.datasetPath)
        if len(self.datasets) > 1:
            raise DataDiscoveryError("DatasetPath=%s is ambiguous" % self.datasetPath)

        try:
            self.dbsdataset = self.datasets[0].get('datasetPathName')

            self.eventsPerBlock = dbs.getEventsPerBlock(self.dbsdataset)
            self.blocksinfo = dbs.getDatasetFileBlocks(self.dbsdataset)
            self.eventsPerFile = dbs.getEventsPerFile(self.dbsdataset)
        except DBSError as ex:
            raise DataDiscoveryError(ex.getErrorMessage())

        if len(self.eventsPerBlock) <= 0:
            raise NotExistingDatasetError(("\nNo data for %s in DBS\nPlease check"
                                           + " dataset path variables in crab.cfg")
                                          % self.dbsdataset)

    # #################################################
    def getMaxEvents(self):
        """
        max events

        Return the total number of events summed over all fileblocks
        found by fetchDBSInfo() (0 before it has been called).
        """
        ## sum the events over all fileblocks
        return sum(self.eventsPerBlock.values())

    # #################################################
    def getEventsPerBlock(self):
        """
        list the event collections structure by fileblock
        """
        return self.eventsPerBlock

    # #################################################
    def getEventsPerFile(self):
        """
        list the event collections structure by file
        """
        return self.eventsPerFile

    # #################################################
    def getFiles(self):
        """
        return files grouped by fileblock
        """
        return self.blocksinfo
130 |
|
|
131 |
|
######################################################################## |
115 |
– |
|
116 |
– |
|