ViewVC Help
View File | Revision Log | Show Annotations | Root Listing
root/cvsroot/COMP/CRAB/python/DataDiscovery.py
(Generate patch)

Comparing COMP/CRAB/python/DataDiscovery.py (file contents):
Revision 1.7 by slacapra, Thu Jul 6 10:22:47 2006 UTC vs.
Revision 1.50 by belforte, Thu Sep 12 13:45:22 2013 UTC

# Line 1 | Line 1
1   #!/usr/bin/env python
2 < import sys, os, string, re
3 < from DBSInfo import *
2 >
3 > __revision__ = "$Id$"
4 > __version__ = "$Revision$"
5 >
6 > import exceptions
7 > import DBSAPI.dbsApi
8 > from DBSAPI.dbsApiException import *
9 > import common
10 > from crab_util import *
11 > try: # Can remove when CMSSW 3.7 and earlier are dropped
12 >    from FWCore.PythonUtilities.LumiList import LumiList
13 > except ImportError:
14 >    from LumiList import LumiList
15 >
16 > import os
17 >
18 >
19 >
20 > class DBSError(exceptions.Exception):
21 >    def __init__(self, errorName, errorMessage):
22 >        args='\nERROR DBS %s : %s \n'%(errorName,errorMessage)
23 >        exceptions.Exception.__init__(self, args)
24 >        pass
25 >
26 >    def getErrorMessage(self):
27 >        """ Return error message """
28 >        return "%s" % (self.args)
29 >
30 >
31 >
32 > class DBSInvalidDataTierError(exceptions.Exception):
33 >    def __init__(self, errorName, errorMessage):
34 >        args='\nERROR DBS %s : %s \n'%(errorName,errorMessage)
35 >        exceptions.Exception.__init__(self, args)
36 >        pass
37 >
38 >    def getErrorMessage(self):
39 >        """ Return error message """
40 >        return "%s" % (self.args)
41 >
42 >
43 >
44 > class DBSInfoError:
45 >    def __init__(self, url):
46 >        print '\nERROR accessing DBS url : '+url+'\n'
47 >        pass
48 >
49  
50  
6 # ####################################
51   class DataDiscoveryError(exceptions.Exception):
52      def __init__(self, errorMessage):
53 <        args=errorMessage
54 <        exceptions.Exception.__init__(self, args)
53 >        self.args=errorMessage
54 >        exceptions.Exception.__init__(self, self.args)
55          pass
56  
57      def getErrorMessage(self):
58          """ Return exception error """
59          return "%s" % (self.args)
60  
61 < # ####################################
61 >
62 >
63   class NotExistingDatasetError(exceptions.Exception):
64      def __init__(self, errorMessage):
65 <        args=errorMessage
66 <        exceptions.Exception.__init__(self, args)
65 >        self.args=errorMessage
66 >        exceptions.Exception.__init__(self, self.args)
67          pass
68  
69      def getErrorMessage(self):
70          """ Return exception error """
71          return "%s" % (self.args)
72  
73 < # ####################################
73 >
74 >
75   class NoDataTierinProvenanceError(exceptions.Exception):
76      def __init__(self, errorMessage):
77 <        args=errorMessage
78 <        exceptions.Exception.__init__(self, args)
77 >        self.args=errorMessage
78 >        exceptions.Exception.__init__(self, self.args)
79          pass
80  
81      def getErrorMessage(self):
82          """ Return exception error """
83          return "%s" % (self.args)
84  
39 # ####################################
40 # class to find and extract info from published data
41 class DataDiscovery:
42    def __init__(self, owner, dataset, dataTiers, cfg_params):
85  
86 < #       Attributes
87 <        self.owner = owner
88 <        self.dataset = dataset
89 <        self.dataTiers = dataTiers
86 >
87 > class DataDiscovery:
88 >    """
88 >    Class to find and extract info from published data
90 >    """
91 >    def __init__(self, datasetPath, cfg_params, skipAnBlocks):
92 >
93 >        #       Attributes
94 >        self.datasetPath = datasetPath
95 >        # Analysis dataset is primary/processed/tier/definition
96 >        self.ads = len(self.datasetPath.split("/")) > 4
97          self.cfg_params = cfg_params
98 +        self.skipBlocks = skipAnBlocks
99  
100 <        self.dbspaths= []     # DBS output: list of dbspaths for all data
101 <        self.allblocks = []   # DBS output: list of map fileblocks-totevts for all dataset-owners
102 <        self.blocksinfo = {}  # DBS output: map fileblocks-totevts for the primary block, used internally to this class
103 < #DBS output: max events computed by method getMaxEvents
100 >        self.eventsPerBlock = {}  # DBS output: map fileblocks-events for collection
101 >        self.eventsPerFile = {}   # DBS output: map files-events
102 > #         self.lumisPerBlock = {}   # DBS output: number of lumis in each block
103 > #         self.lumisPerFile = {}    # DBS output: number of lumis in each file
104 >        self.blocksinfo = {}      # DBS output: map fileblocks-files
105 >        self.maxEvents = 0        # DBS output: max events
106 >        self.maxLumis = 0         # DBS output: total number of lumis
107 >        self.parent = {}          # DBS output: parents of each file
108 >        self.lumis = {}           # DBS output: lumis in each file
109 >        self.lumiMask = None
110 >        self.splitByLumi = False
111 >        self.splitDataByEvent = 0
112  
55 # ####################################
113      def fetchDBSInfo(self):
114          """
115          Contact DBS
116          """
117 +        ## get DBS URL
118 +        global_url="http://cmsdbsprod.cern.ch/cms_dbs_prod_global/servlet/DBSServlet"
119 +        dbs_url=  self.cfg_params.get('CMSSW.dbs_url', global_url)
120 +        common.logger.info("Accessing DBS at: "+dbs_url)
121 +
122 +        ## check if runs are selected
123 +        runselection = []
124 +        if (self.cfg_params.has_key('CMSSW.runselection')):
125 +            runselection = parseRange2(self.cfg_params['CMSSW.runselection'])
126 +            if len(runselection)>1000000:
127 +                common.logger.info("ERROR: runselection range has more then 1M numbers")
128 +                common.logger.info("ERROR: Too large. runselection is ignored")
129 +                runselection=[]
130 +
131 +        ## check if various lumi parameters are set
132 +        self.lumiMask = self.cfg_params.get('CMSSW.lumi_mask',None)
133 +        self.lumiParams = self.cfg_params.get('CMSSW.total_number_of_lumis',None) or \
134 +                          self.cfg_params.get('CMSSW.lumis_per_job',None)
135 +
136 +        lumiList = None
137 +        if self.lumiMask:
138 +            lumiList = LumiList(filename=self.lumiMask)
139 +        if runselection:
140 +            runList = LumiList(runs = runselection)
141 +
142 +        self.splitByRun = int(self.cfg_params.get('CMSSW.split_by_run', 0))
143 +        self.splitDataByEvent = int(self.cfg_params.get('CMSSW.split_by_event', 0))
144 +        common.logger.log(10-1,"runselection is: %s"%runselection)
145 +
146 +        if not self.splitByRun:
147 +            self.splitByLumi = self.lumiMask or self.lumiParams or self.ads
148 +
149 +        if self.splitByRun and not runselection:
150 +            msg = "Error: split_by_run must be combined with a runselection"
151 +            raise CrabException(msg)
152 +
153 +        ## service API
154 +        args = {}
155 +        args['url']     = dbs_url
156 +        args['level']   = 'CRITICAL'
157 +
158 +        ## check if has been requested to use the parent info
159 +        useparent = int(self.cfg_params.get('CMSSW.use_parent',0))
160 +
161 +        ## check if has been asked for a non default file to store/read analyzed fileBlocks
162 +        defaultName = common.work_space.shareDir()+'AnalyzedBlocks.txt'
163 +        fileBlocks_FileName = os.path.abspath(self.cfg_params.get('CMSSW.fileblocks_file',defaultName))
164 +
165 +        api = DBSAPI.dbsApi.DbsApi(args)
166 +        self.files = self.queryDbs(api,path=self.datasetPath,runselection=runselection,useParent=useparent)
167 +
168 +        # Check to see what the dataset is
169 +        pdsName = self.datasetPath.split("/")[1]
170 +        primDSs = api.listPrimaryDatasets(pdsName)
171 +        dataType = primDSs[0]['Type']
172 +        common.logger.debug("Datatype is %s" % dataType)
173 +        if dataType == 'data' and not \
174 +            (self.splitByRun or self.splitByLumi or self.splitDataByEvent):
175 +            msg = 'Data must be split by lumi or by run. ' \
176 +                  'Please see crab -help for the correct settings'
177 +            raise  CrabException(msg)
178 +
179 +
180 +
181 +        anFileBlocks = []
182 +        if self.skipBlocks: anFileBlocks = readTXTfile(self, fileBlocks_FileName)
183 +
184 +        # parse files and fill arrays
185 +        for file in self.files :
186 +            parList  = []
187 +            fileLumis = [] # List of tuples
188 +            # skip already analyzed blocks
189 +            fileblock = file['Block']['Name']
190 +            if fileblock not in anFileBlocks :
191 +                filename = file['LogicalFileName']
192 +                # if requested, retrieve the list of parents for the given child
193 +                if useparent==1:
194 +                    parList = [x['LogicalFileName'] for x in file['ParentList']]
195 +                if self.splitByLumi:
196 +                    fileLumis = [ (x['RunNumber'], x['LumiSectionNumber'])
197 +                                 for x in file['LumiList'] ]
198 +                self.parent[filename] = parList
199 +                # For LumiMask, intersection of two lists.
200 +                if self.lumiMask and runselection:
201 +                    self.lumis[filename] = runList.filterLumis(lumiList.filterLumis(fileLumis))
202 +                elif runselection:
203 +                    self.lumis[filename] = runList.filterLumis(fileLumis)
204 +                elif self.lumiMask:
205 +                    self.lumis[filename] = lumiList.filterLumis(fileLumis)
206 +                else:
207 +                    self.lumis[filename] = fileLumis
208 +                if filename.find('.dat') < 0 :
209 +                    events    = file['NumberOfEvents']
210 +                    # Count number of events and lumis per block
211 +                    if fileblock in self.eventsPerBlock.keys() :
212 +                        self.eventsPerBlock[fileblock] += events
213 +                    else :
214 +                        self.eventsPerBlock[fileblock] = events
215 +                    # Number of events per file
216 +                    self.eventsPerFile[filename] = events
217 +
218 +                    # List of files per block
219 +                    if fileblock in self.blocksinfo.keys() :
220 +                        self.blocksinfo[fileblock].append(filename)
221 +                    else :
222 +                        self.blocksinfo[fileblock] = [filename]
223 +
224 +                    # total number of events
225 +                    self.maxEvents += events
226 +                    self.maxLumis  += len(self.lumis[filename])
227 +
228 +        if  self.skipBlocks and len(self.eventsPerBlock.keys()) == 0:
229 +            msg = "No new fileblocks available for dataset: "+str(self.datasetPath)
230 +            raise  CrabException(msg)
231 +
232 +
233 +        if len(self.eventsPerBlock) <= 0:
234 +            raise NotExistingDatasetError(("\nNo data for %s in DBS\nPlease check"
235 +                                            + " dataset path variables in crab.cfg")
236 +                                            % self.datasetPath)
237 +
238 +
239 +    def queryDbs(self,api,path=None,runselection=None,useParent=None):
240 +
241 +
242 +        allowedRetriveValue = []
243 +        if self.splitByLumi or self.splitByRun or useParent == 1:
244 +            allowedRetriveValue.extend(['retrive_block', 'retrive_run'])
245 +        if self.splitByLumi:
246 +            allowedRetriveValue.append('retrive_lumi')
247 +        if useParent == 1:
248 +            allowedRetriveValue.append('retrive_parent')
249 +        common.logger.debug("Set of input parameters used for DBS query: %s" % allowedRetriveValue)
250 +        try:
251 +            if self.splitByRun:
252 +                files = []
253 +                for arun in runselection:
254 +                    try:
255 +                        if self.ads:
256 +                            filesinrun = api.listFiles(analysisDataset=path,retriveList=allowedRetriveValue,runNumber=arun)
257 +                        else:
258 +                            filesinrun = api.listFiles(path=path,retriveList=allowedRetriveValue,runNumber=arun)
259 +                        files.extend(filesinrun)
260 +                    except:
261 +                        msg="WARNING: problem extracting info from DBS for run %s "%arun
262 +                        common.logger.info(msg)
263 +                        pass
264 +
265 +            else:
266 +                if allowedRetriveValue:
267 +                    if self.ads:
268 +                        files = api.listFiles(analysisDataset=path, retriveList=allowedRetriveValue)
269 +                    else :
270 +                        files = api.listFiles(path=path, retriveList=allowedRetriveValue)
271 +                else:
272 +                    files = api.listDatasetFiles(self.datasetPath)
273  
274 <        ## add the PU among the required data tiers if the Digi are requested
275 <        if (self.dataTiers.count('Digi')>0) & (self.dataTiers.count('PU')<=0) :
276 <            self.dataTiers.append('PU')
274 >        except DbsBadRequest, msg:
275 >            raise DataDiscoveryError(msg)
276 >        except DBSError, msg:
277 >            raise DataDiscoveryError(msg)
278  
279 <        ## get info about the requested dataset
66 <        dbs=DBSInfo()
67 <        try:
68 <            self.datasets = dbs.getMatchingDatasets(self.owner, self.dataset)
69 <        except DBSError, ex:
70 <            raise DataDiscoveryError(ex.getErrorMessage())
71 <        if len(self.datasets) == 0:
72 <            raise DataDiscoveryError("Owner=%s, Dataset=%s unknown to DBS" % (self.owner, self.dataset))
73 <        if len(self.datasets) > 1:
74 <            raise DataDiscoveryError("Owner=%s, Dataset=%s is ambiguous" % (self.owner, self.dataset))
75 <        try:
76 <            self.dbsdataset = self.datasets[0].get('datasetPathName')
77 <            self.blocksinfo = dbs.getDatasetContents(self.dbsdataset)
78 <            self.allblocks.append (self.blocksinfo.keys ()) # add also the current fileblocksinfo
79 <            self.dbspaths.append(self.dbsdataset)
80 <        except DBSError, ex:
81 <            raise DataDiscoveryError(ex.getErrorMessage())
82 <        
83 <        if len(self.blocksinfo)<=0:
84 <            msg="\nERROR Data for %s do not exist in DBS! \n Check the dataset/owner variables in crab.cfg !"%self.dbsdataset
85 <            raise NotExistingDatasetError(msg)
279 >        return files
280  
281  
282 <        ## get info about the parents
283 <        try:
284 <            parents=dbs.getDatasetProvenance(self.dbsdataset, self.dataTiers)
285 <        except DBSInvalidDataTierError, ex:
286 <            msg=ex.getErrorMessage()+' \n Check the data_tier variable in crab.cfg !\n'
93 <            raise DataDiscoveryError(msg)
94 <        except DBSError, ex:
95 <            raise DataDiscoveryError(ex.getErrorMessage())
282 >    def getMaxEvents(self):
283 >        """
284 >        max events
285 >        """
286 >        return self.maxEvents
287  
97        ## check that the user asks for parent Data Tier really existing in the DBS provenance
98        self.checkParentDataTier(parents, self.dataTiers)
288  
289 <        ## for each parent get the corresponding fileblocks
290 <        try:
291 <            for p in parents:
292 <                ## fill a list of dbspaths
293 <                parentPath = p.get('parent').get('datasetPathName')
105 <                self.dbspaths.append (parentPath)
106 <                parentBlocks = dbs.getDatasetContents (parentPath)
107 <                self.allblocks.append (parentBlocks.keys ())  # add parent fileblocksinfo
108 <            except DBSError, ex:
109 <                raise DataDiscoveryError(ex.getErrorMessage())
110 <
111 < # #################################################
112 <    def checkParentDataTier(self, parents, dataTiers):
113 <        """
114 <        check that the data tiers requested by the user really exists in the provenance of the given dataset
115 <        """
116 <        startType = string.split(self.dbsdataset,'/')[2]
117 <        # for example 'type' is PU and 'dataTier' is Hit
118 <        parentTypes = map(lambda p: p.get('type'), parents)
119 <        for tier in dataTiers:
120 <            if parentTypes.count(tier) <= 0 and tier != startType:
121 <                msg="\nERROR Data %s not published in DBS with asked data tiers : the data tier not found is %s !\n  Check the data_tier variable in crab.cfg !"%(self.dbsdataset,tier)
122 <                raise  NoDataTierinProvenanceError(msg)
289 >    def getMaxLumis(self):
290 >        """
291 >        Return the number of lumis in the dataset
292 >        """
293 >        return self.maxLumis
294  
295  
296 < # #################################################
297 <    def getMaxEvents(self):
296 >    def getEventsPerBlock(self):
297 >        """
298 >        list the event collections structure by fileblock
299 >        """
300 >        return self.eventsPerBlock
301 >
302 >
303 >    def getEventsPerFile(self):
304          """
305 <        max events of the primary dataset-owner
305 >        list the event collections structure by file
306          """
307 <        ## loop over the fileblocks of the primary dataset-owner
131 <        nevts=0      
132 <        for blockevts in self.blocksinfo.values():
133 <            nevts=nevts+blockevts
307 >        return self.eventsPerFile
308  
135        return nevts
309  
310 < # #################################################
138 <    def getDBSPaths(self):
310 >    def getFiles(self):
311          """
312 <        list the DBSpaths for all required data
312 >        return files grouped by fileblock
313          """
314 <        return self.dbspaths
314 >        return self.blocksinfo
315  
316 < # #################################################
317 <    def getEVC(self):
316 >
317 >    def getParent(self):
318          """
319 <        list the event collections structure by fileblock
319 >        return parent grouped by file
320          """
321 <        print "To be used by a more complex job splitting... TODO later... "
322 <        print "it requires changes in what's returned by DBSInfo.getDatasetContents and then fetchDBSInfo"
321 >        return self.parent
322 >
323  
324 < # #################################################
153 <    def getFileBlocks(self):
324 >    def getLumis(self):
325          """
326 <        fileblocks for all required dataset-owners
326 >        return lumi sections grouped by file
327          """
328 <        return self.allblocks        
328 >        return self.lumis
329  
330 < ########################################################################
330 >
331 >    def getListFiles(self):
332 >        """
333 >        return parent grouped by file
334 >        """
335 >        return self.files

Diff Legend

Removed lines
+ Added lines
< Changed lines
> Changed lines