root/cvsroot/COMP/CRAB/python/DataDiscovery.py

Comparing COMP/CRAB/python/DataDiscovery.py (file contents):
Revision 1.5 by afanfani, Thu May 18 18:46:22 2006 UTC vs.
Revision 1.48 by ewv, Tue Jul 6 16:31:55 2010 UTC

# Line 1 | Line 1
1 < #!/usr/bin/env python2
2 < import sys, os, string, re
3 < from DBSInfo import *
1 > #!/usr/bin/env python
2 >
3 > __revision__ = "$Id$"
4 > __version__ = "$Revision$"
5 >
6 > import exceptions
7 > import DBSAPI.dbsApi
8 > from DBSAPI.dbsApiException import *
9 > import common
10 > from crab_util import *
11 > try: # Can remove when CMSSW 3.7 and earlier are dropped
12 >    from FWCore.PythonUtilities.LumiList import LumiList
13 > except ImportError:
14 >    from LumiList import LumiList
15 >
16 > import os
17 >
18 >
19 >
20 > class DBSError(exceptions.Exception):
21 >    def __init__(self, errorName, errorMessage):
22 >        args='\nERROR DBS %s : %s \n'%(errorName,errorMessage)
23 >        exceptions.Exception.__init__(self, args)
24 >        pass
25 >
26 >    def getErrorMessage(self):
27 >        """ Return error message """
28 >        return "%s" % (self.args)
29 >
30 >
31 >
32 > class DBSInvalidDataTierError(exceptions.Exception):
33 >    def __init__(self, errorName, errorMessage):
34 >        args='\nERROR DBS %s : %s \n'%(errorName,errorMessage)
35 >        exceptions.Exception.__init__(self, args)
36 >        pass
37 >
38 >    def getErrorMessage(self):
39 >        """ Return error message """
40 >        return "%s" % (self.args)
41 >
42 >
43 >
44 > class DBSInfoError:
45 >    def __init__(self, url):
46 >        print '\nERROR accessing DBS url : '+url+'\n'
47 >        pass
48 >
49  
50  
6 # ####################################
51   class DataDiscoveryError(exceptions.Exception):
52 <  def __init__(self, errorMessage):
53 <   args=errorMessage
54 <   exceptions.Exception.__init__(self, args)
55 <   pass
56 <
57 <  def getErrorMessage(self):
58 <   """ Return exception error """
59 <   return "%s" % (self.args)
52 >    def __init__(self, errorMessage):
53 >        self.args=errorMessage
54 >        exceptions.Exception.__init__(self, self.args)
55 >        pass
56 >
57 >    def getErrorMessage(self):
58 >        """ Return exception error """
59 >        return "%s" % (self.args)
60 >
61 >
62  
17 # ####################################
63   class NotExistingDatasetError(exceptions.Exception):
64 <  def __init__(self, errorMessage):
65 <   args=errorMessage
66 <   exceptions.Exception.__init__(self, args)
67 <   pass
68 <
69 <  def getErrorMessage(self):
70 <   """ Return exception error """
71 <   return "%s" % (self.args)
64 >    def __init__(self, errorMessage):
65 >        self.args=errorMessage
66 >        exceptions.Exception.__init__(self, self.args)
67 >        pass
68 >
69 >    def getErrorMessage(self):
70 >        """ Return exception error """
71 >        return "%s" % (self.args)
72 >
73 >
74  
28 # ####################################
75   class NoDataTierinProvenanceError(exceptions.Exception):
76 <  def __init__(self, errorMessage):
77 <   args=errorMessage
78 <   exceptions.Exception.__init__(self, args)
79 <   pass
80 <
81 <  def getErrorMessage(self):
82 <   """ Return exception error """
83 <   return "%s" % (self.args)
76 >    def __init__(self, errorMessage):
77 >        self.args=errorMessage
78 >        exceptions.Exception.__init__(self, self.args)
79 >        pass
80 >
81 >    def getErrorMessage(self):
82 >        """ Return exception error """
83 >        return "%s" % (self.args)
84 >
85  
39 # ####################################
40 # class to find and extract info from published data
41 class DataDiscovery:
42    def __init__(self, owner, dataset, dataTiers, cfg_params):
86  
87 < #       Attributes
88 <        self.owner = owner
89 <        self.dataset = dataset
90 <        self.dataTiers = dataTiers
87 > class DataDiscovery:
88 >    """
89 >    Class to find and extract info from published data
90 >    """
91 >    def __init__(self, datasetPath, cfg_params, skipAnBlocks):
92 >
93 >        #       Attributes
94 >        self.datasetPath = datasetPath
95 >        # Analysis dataset is primary/processed/tier/definition
96 >        self.ads = len(self.datasetPath.split("/")) > 4
97          self.cfg_params = cfg_params
98 +        self.skipBlocks = skipAnBlocks
99  
100 <        self.dbspaths= []     # DBS output: list of dbspaths for all data
101 <        self.allblocks = []   # DBS output: list of map fileblocks-totevts for all dataset-owners
102 <        self.blocksinfo = {}  # DBS output: map fileblocks-totevts for the primary block, used internally to this class
103 < #DBS output: max events computed by method getMaxEvents
100 >        self.eventsPerBlock = {}  # DBS output: map fileblocks-events for collection
101 >        self.eventsPerFile = {}   # DBS output: map files-events
102 > #         self.lumisPerBlock = {}   # DBS output: number of lumis in each block
103 > #         self.lumisPerFile = {}    # DBS output: number of lumis in each file
104 >        self.blocksinfo = {}      # DBS output: map fileblocks-files
105 >        self.maxEvents = 0        # DBS output: max events
106 >        self.maxLumis = 0         # DBS output: total number of lumis
107 >        self.parent = {}          # DBS output: parents of each file
108 >        self.lumis = {}           # DBS output: lumis in each file
109 >        self.lumiMask = None
110 >        self.splitByLumi = False
111  
55 # ####################################
112      def fetchDBSInfo(self):
113          """
114          Contact DBS
115          """
116 <
117 <        ## add the PU among the required data tiers if the Digi are requested
118 <        if (self.dataTiers.count('Digi')>0) & (self.dataTiers.count('PU')<=0) :
119 <          self.dataTiers.append('PU')
120 <
121 <        ## get info about the requested dataset
122 <        dbs=DBSInfo()
116 >        ## get DBS URL
117 >        global_url="http://cmsdbsprod.cern.ch/cms_dbs_prod_global/servlet/DBSServlet"
118 >        dbs_url=  self.cfg_params.get('CMSSW.dbs_url', global_url)
119 >        common.logger.info("Accessing DBS at: "+dbs_url)
120 >
121 >        ## check if runs are selected
122 >        runselection = []
123 >        if (self.cfg_params.has_key('CMSSW.runselection')):
124 >            runselection = parseRange2(self.cfg_params['CMSSW.runselection'])
125 >
126 >        ## check if various lumi parameters are set
127 >        self.lumiMask = self.cfg_params.get('CMSSW.lumi_mask',None)
128 >        self.lumiParams = self.cfg_params.get('CMSSW.total_number_of_lumis',None) or \
129 >                          self.cfg_params.get('CMSSW.lumis_per_job',None)
130 >
131 >        lumiList = None
132 >        if self.lumiMask:
133 >            lumiList = LumiList(filename=self.lumiMask)
134 >        if runselection:
135 >            runList = LumiList(runs = runselection)
136 >
137 >        self.splitByRun = int(self.cfg_params.get('CMSSW.split_by_run', 0))
138 >        common.logger.log(10-1,"runselection is: %s"%runselection)
139 >
140 >        if not self.splitByRun:
141 >            self.splitByLumi = self.lumiMask or self.lumiParams or self.ads
142 >
143 >        if self.splitByRun and not runselection:
144 >            msg = "Error: split_by_run must be combined with a runselection"
145 >            raise CrabException(msg)
146 >
147 >        ## service API
148 >        args = {}
149 >        args['url']     = dbs_url
150 >        args['level']   = 'CRITICAL'
151 >
152 >        ## check if use of the parent info has been requested
153 >        useparent = int(self.cfg_params.get('CMSSW.use_parent',0))
154 >
155 >        ## check if a non-default file to store/read analyzed fileBlocks has been requested
156 >        defaultName = common.work_space.shareDir()+'AnalyzedBlocks.txt'
157 >        fileBlocks_FileName = os.path.abspath(self.cfg_params.get('CMSSW.fileblocks_file',defaultName))
158 >
159 >        api = DBSAPI.dbsApi.DbsApi(args)
160 >        self.files = self.queryDbs(api,path=self.datasetPath,runselection=runselection,useParent=useparent)
161 >
162 >        # Check to see what the dataset is
163 >        pdsName = self.datasetPath.split("/")[1]
164 >        primDSs = api.listPrimaryDatasets(pdsName)
165 >        dataType = primDSs[0]['Type']
166 >        common.logger.debug("Datatype is %s" % dataType)
167 >        if dataType == 'data' and not (self.splitByRun or self.splitByLumi):
168 >            msg = 'Data must be split by lumi or by run. ' \
169 >                  'Please see crab -help for the correct settings'
170 >            raise  CrabException(msg)
171 >
172 >
173 >
174 >        anFileBlocks = []
175 >        if self.skipBlocks: anFileBlocks = readTXTfile(self, fileBlocks_FileName)
176 >
177 >        # parse files and fill arrays
178 >        for file in self.files :
179 >            parList  = []
180 >            fileLumis = [] # List of tuples
181 >            # skip already analyzed blocks
182 >            fileblock = file['Block']['Name']
183 >            if fileblock not in anFileBlocks :
184 >                filename = file['LogicalFileName']
185 >                # if requested, retrieve the list of parents for the given child
186 >                if useparent==1:
187 >                    parList = [x['LogicalFileName'] for x in file['ParentList']]
188 >                if self.splitByLumi:
189 >                    fileLumis = [ (x['RunNumber'], x['LumiSectionNumber'])
190 >                                 for x in file['LumiList'] ]
191 >                self.parent[filename] = parList
192 >                # For LumiMask, intersection of two lists.
193 >                if self.lumiMask and runselection:
194 >                    self.lumis[filename] = runList.filterLumis(lumiList.filterLumis(fileLumis))
195 >                elif runselection:
196 >                    self.lumis[filename] = runList.filterLumis(fileLumis)
197 >                elif self.lumiMask:
198 >                    self.lumis[filename] = lumiList.filterLumis(fileLumis)
199 >                else:
200 >                    self.lumis[filename] = fileLumis
201 >                if filename.find('.dat') < 0 :
202 >                    events    = file['NumberOfEvents']
203 >                    # Count number of events and lumis per block
204 >                    if fileblock in self.eventsPerBlock.keys() :
205 >                        self.eventsPerBlock[fileblock] += events
206 >                    else :
207 >                        self.eventsPerBlock[fileblock] = events
208 >                    # Number of events per file
209 >                    self.eventsPerFile[filename] = events
210 >
211 >                    # List of files per block
212 >                    if fileblock in self.blocksinfo.keys() :
213 >                        self.blocksinfo[fileblock].append(filename)
214 >                    else :
215 >                        self.blocksinfo[fileblock] = [filename]
216 >
217 >                    # total number of events
218 >                    self.maxEvents += events
219 >                    self.maxLumis  += len(self.lumis[filename])
220 >
221 >        if  self.skipBlocks and len(self.eventsPerBlock.keys()) == 0:
222 >            msg = "No new fileblocks available for dataset: "+str(self.datasetPath)
223 >            raise  CrabException(msg)
224 >
225 >
226 >        if len(self.eventsPerBlock) <= 0:
227 >            raise NotExistingDatasetError(("\nNo data for %s in DBS\nPlease check"
228 >                                            + " dataset path variables in crab.cfg")
229 >                                            % self.datasetPath)
230 >
231 >
232 >    def queryDbs(self,api,path=None,runselection=None,useParent=None):
233 >
234 >
235 >        allowedRetriveValue = []
236 >        if self.splitByLumi or self.splitByRun or useParent == 1:
237 >            allowedRetriveValue.extend(['retrive_block', 'retrive_run'])
238 >        if self.splitByLumi:
239 >            allowedRetriveValue.append('retrive_lumi')
240 >        if useParent == 1:
241 >            allowedRetriveValue.append('retrive_parent')
242 >        common.logger.debug("Set of input parameters used for DBS query: %s" % allowedRetriveValue)
243          try:
244 <         self.datasets = dbs.getMatchingDatasets(self.owner, self.dataset)
245 <        except DBSError, ex:
246 <          raise DataDiscoveryError(ex.getErrorMessage())
247 <        if len(self.datasets) == 0:
248 <          raise DataDiscoveryError("Owner=%s, Dataset=%s unknown to DBS" % (self.owner, self.dataset))
249 <        if len(self.datasets) > 1:
250 <          raise DataDiscoveryError("Owner=%s, Dataset=%s is ambiguous" % (self.owner, self.dataset))
251 <        try:
252 <          self.dbsdataset = self.datasets[0].get('datasetPathName')
253 <          self.blocksinfo = dbs.getDatasetContents(self.dbsdataset)
254 <          self.allblocks.append (self.blocksinfo.keys ()) # add also the current fileblocksinfo
255 <          self.dbspaths.append(self.dbsdataset)
256 <        except DBSError, ex:
257 <          raise DataDiscoveryError(ex.getErrorMessage())
258 <        
259 <        if len(self.blocksinfo)<=0:
260 <         msg="\nERROR Data for %s do not exist in DBS! \n Check the dataset/owner variables in crab.cfg !"%self.dbsdataset
261 <         raise NotExistingDatasetError(msg)
244 >            if self.splitByRun:
245 >                files = []
246 >                for arun in runselection:
247 >                    try:
248 >                        if self.ads:
249 >                            filesinrun = api.listFiles(analysisDataset=path,retriveList=allowedRetriveValue,runNumber=arun)
250 >                        else:
251 >                            filesinrun = api.listFiles(path=path,retriveList=allowedRetriveValue,runNumber=arun)
252 >                        files.extend(filesinrun)
253 >                    except:
254 >                        msg="WARNING: problem extracting info from DBS for run %s "%arun
255 >                        common.logger.info(msg)
256 >                        pass
257 >
258 >            else:
259 >                if allowedRetriveValue:
260 >                    if self.ads:
261 >                        files = api.listFiles(analysisDataset=path, retriveList=allowedRetriveValue)
262 >                    else :
263 >                        files = api.listFiles(path=path, retriveList=allowedRetriveValue)
264 >                else:
265 >                    files = api.listDatasetFiles(self.datasetPath)
266 >
267 >        except DbsBadRequest, msg:
268 >            raise DataDiscoveryError(msg)
269 >        except DBSError, msg:
270 >            raise DataDiscoveryError(msg)
271  
272 +        return files
273  
88        ## get info about the parents
89        try:
90          parents=dbs.getDatasetProvenance(self.dbsdataset, self.dataTiers)
91        except DBSInvalidDataTierError, ex:
92          msg=ex.getErrorMessage()+' \n Check the data_tier variable in crab.cfg !\n'
93          raise DataDiscoveryError(msg)
94        except DBSError, ex:
95          raise DataDiscoveryError(ex.getErrorMessage())
274  
275 <        ## check that the user asks for parent Data Tier really existing in the DBS provenance
276 <        self.checkParentDataTier(parents, self.dataTiers)
275 >    def getMaxEvents(self):
276 >        """
277 >        Return the total number of events in the dataset
278 >        """
279 >        return self.maxEvents
280  
281 <        ## for each parent get the corresponding fileblocks
282 <        try:
283 <          for p in parents:
284 <            ## fill a list of dbspaths
285 <            parentPath = p.get('parent').get('datasetPathName')
286 <            self.dbspaths.append (parentPath)
106 <            parentBlocks = dbs.getDatasetContents (parentPath)
107 <            self.allblocks.append (parentBlocks.keys ())  # add parent fileblocksinfo
108 <        except DBSError, ex:
109 <            raise DataDiscoveryError(ex.getErrorMessage())
110 <
111 < # #################################################
112 <    def checkParentDataTier(self, parents, dataTiers):
113 <        """
114 <         check that the data tiers requested by the user really exists in the provenance of the given dataset
115 <        """
116 <        startType = string.split(self.dbsdataset,'/')[2]
117 <        # for example 'type' is PU and 'dataTier' is Hit
118 <        parentTypes = map(lambda p: p.get('type'), parents)
119 <        for tier in dataTiers:
120 <          if parentTypes.count(tier) <= 0 and tier != startType:
121 <            msg="\nERROR Data %s not published in DBS with asked data tiers : the data tier not found is %s !\n  Check the data_tier variable in crab.cfg !"%(self.dbsdataset,tier)
122 <            raise  NoDataTierinProvenanceError(msg)
281 >
282 >    def getMaxLumis(self):
283 >        """
284 >        Return the number of lumis in the dataset
285 >        """
286 >        return self.maxLumis
287  
288  
289 < # #################################################
126 <    def getMaxEvents(self):
289 >    def getEventsPerBlock(self):
290          """
291 <         max events of the primary dataset-owner
291 >        list the event collections structure by fileblock
292          """
293 <        ## loop over the fileblocks of the primary dataset-owner
131 <        nevts=0      
132 <        for blockevts in self.blocksinfo.values():
133 <          nevts=nevts+blockevts
293 >        return self.eventsPerBlock
294  
135        return nevts
295  
296 < # #################################################
138 <    def getDBSPaths(self):
296 >    def getEventsPerFile(self):
297          """
298 <         list the DBSpaths for all required data
298 >        list the event collections structure by file
299          """
300 <        return self.dbspaths
300 >        return self.eventsPerFile
301  
302 < # #################################################
303 <    def getEVC(self):
302 >
303 >    def getFiles(self):
304          """
305 <         list the event collections structure by fileblock
305 >        return files grouped by fileblock
306          """
307 <        print "To be used by a more complex job splitting... TODO later... "
308 <        print "it requires changes in what's returned by DBSInfo.getDatasetContents and then fetchDBSInfo"
307 >        return self.blocksinfo
308 >
309  
310 < # #################################################
153 <    def getFileBlocks(self):
310 >    def getParent(self):
311          """
312 <         fileblocks for all required dataset-owners
312 >        return parent grouped by file
313          """
314 <        return self.allblocks        
314 >        return self.parent
315  
316 < ########################################################################
316 >
317 >    def getLumis(self):
318 >        """
319 >        return lumi sections grouped by file
320 >        """
321 >        return self.lumis
322  
323  
324 +    def getListFiles(self):
325 +        """
326 +        return the list of files retrieved from DBS
327 +        """
328 +        return self.files

Diff Legend

Removed lines
+ Added lines
< Changed lines
> Changed lines
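
A minimal usage sketch of the revised DataDiscovery class (revision 1.48). This is an illustrative example only: it assumes a configured CRAB environment (the common module with its logger and work_space initialised) and network access to DBS; the dataset path and configuration values below are placeholders, not values taken from this file.

    from DataDiscovery import DataDiscovery, NotExistingDatasetError

    # Placeholder configuration; keys mirror those read by fetchDBSInfo()
    cfg_params = {
        'CMSSW.dbs_url': 'http://cmsdbsprod.cern.ch/cms_dbs_prod_global/servlet/DBSServlet',
        'CMSSW.split_by_run': '0',
        # 'CMSSW.lumi_mask': 'lumimask.json',   # optional JSON lumi mask (placeholder name)
    }

    datasetPath = '/SomePrimary/SomeProcessed/RECO'   # placeholder dataset path
    discovery = DataDiscovery(datasetPath, cfg_params, skipAnBlocks=False)

    try:
        discovery.fetchDBSInfo()
    except NotExistingDatasetError, ex:
        print ex.getErrorMessage()
    else:
        print 'total events:', discovery.getMaxEvents()
        print 'total lumis :', discovery.getMaxLumis()
        for block, filenames in discovery.getFiles().items():
            print block, '->', len(filenames), 'files'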