
Comparing COMP/CRAB/python/DataDiscovery.py (file contents):
Revision 1.4 by afanfani, Sun Mar 12 01:00:47 2006 UTC vs.
Revision 1.42 by ewv, Mon Mar 22 21:17:15 2010 UTC

# Line 1 | Line 1
1 < #!/usr/bin/env python2
2 < import sys, os, string, re
3 < from DBSInfo import *
1 > #!/usr/bin/env python
2 >
3 > __revision__ = "$Id$"
4 > __version__ = "$Revision$"
5 >
6 > import exceptions
7 > import DBSAPI.dbsApi
8 > from DBSAPI.dbsApiException import *
9 > import common
10 > from crab_util import *
11 > from LumiList import LumiList
12 > import os
13 >
14 >
15 >
16 > class DBSError(exceptions.Exception):
17 >    def __init__(self, errorName, errorMessage):
18 >        args='\nERROR DBS %s : %s \n'%(errorName,errorMessage)
19 >        exceptions.Exception.__init__(self, args)
20 >        pass
21 >
22 >    def getErrorMessage(self):
23 >        """ Return error message """
24 >        return "%s" % (self.args)
25 >
26 >
27 >
28 > class DBSInvalidDataTierError(exceptions.Exception):
29 >    def __init__(self, errorName, errorMessage):
30 >        args='\nERROR DBS %s : %s \n'%(errorName,errorMessage)
31 >        exceptions.Exception.__init__(self, args)
32 >        pass
33 >
34 >    def getErrorMessage(self):
35 >        """ Return error message """
36 >        return "%s" % (self.args)
37 >
38 >
39 >
40 > class DBSInfoError:
41 >    def __init__(self, url):
42 >        print '\nERROR accessing DBS url : '+url+'\n'
43 >        pass
44 >
45  
46  
6 # ####################################
47   class DataDiscoveryError(exceptions.Exception):
48 <  def __init__(self, errorMessage):
49 <   args=errorMessage
50 <   exceptions.Exception.__init__(self, args)
51 <   pass
52 <
53 <  def getErrorMessage(self):
54 <   """ Return exception error """
55 <   return "%s" % (self.args)
48 >    def __init__(self, errorMessage):
49 >        self.args=errorMessage
50 >        exceptions.Exception.__init__(self, self.args)
51 >        pass
52 >
53 >    def getErrorMessage(self):
54 >        """ Return exception error """
55 >        return "%s" % (self.args)
56 >
57 >
58  
17 # ####################################
59   class NotExistingDatasetError(exceptions.Exception):
60 <  def __init__(self, errorMessage):
61 <   args=errorMessage
62 <   exceptions.Exception.__init__(self, args)
63 <   pass
64 <
65 <  def getErrorMessage(self):
66 <   """ Return exception error """
67 <   return "%s" % (self.args)
60 >    def __init__(self, errorMessage):
61 >        self.args=errorMessage
62 >        exceptions.Exception.__init__(self, self.args)
63 >        pass
64 >
65 >    def getErrorMessage(self):
66 >        """ Return exception error """
67 >        return "%s" % (self.args)
68 >
69 >
70  
28 # ####################################
71   class NoDataTierinProvenanceError(exceptions.Exception):
72 <  def __init__(self, errorMessage):
73 <   args=errorMessage
74 <   exceptions.Exception.__init__(self, args)
75 <   pass
76 <
77 <  def getErrorMessage(self):
78 <   """ Return exception error """
79 <   return "%s" % (self.args)
72 >    def __init__(self, errorMessage):
73 >        self.args=errorMessage
74 >        exceptions.Exception.__init__(self, self.args)
75 >        pass
76 >
77 >    def getErrorMessage(self):
78 >        """ Return exception error """
79 >        return "%s" % (self.args)
80  
39 # ####################################
40 # class to find and extract info from published data
41 class DataDiscovery:
42    def __init__(self, owner, dataset, dataTiers, cfg_params):
81  
82 < #       Attributes
83 <        self.owner = owner
84 <        self.dataset = dataset
85 <        self.dataTiers = dataTiers
82 >
83 > class DataDiscovery:
84 >    """
85 >    Class to find and extract info from published data
86 >    """
87 >    def __init__(self, datasetPath, cfg_params, skipAnBlocks):
88 >
89 >        #       Attributes
90 >        self.datasetPath = datasetPath
91 >        # Analysis dataset is primary/processed/tier/definition
92 >        self.ads = len(self.datasetPath.split("/")) > 4
93          self.cfg_params = cfg_params
94 +        self.skipBlocks = skipAnBlocks
95  
96 <        self.dbspaths= []     # DBS output: list of dbspaths for all data
97 <        self.allblocks = []   # DBS output: list of map fileblocks-totevts for all dataset-owners
98 <        self.blocksinfo = {}  # DBS output: map fileblocks-totevts for the primary block, used internally to this class
99 < #DBS output: max events computed by method getMaxEvents
96 >        self.eventsPerBlock = {}  # DBS output: map fileblocks-events for collection
97 >        self.eventsPerFile = {}   # DBS output: map files-events
98 > #         self.lumisPerBlock = {}   # DBS output: number of lumis in each block
99 > #         self.lumisPerFile = {}    # DBS output: number of lumis in each file
100 >        self.blocksinfo = {}      # DBS output: map fileblocks-files
101 >        self.maxEvents = 0        # DBS output: max events
102 >        self.maxLumis = 0         # DBS output: total number of lumis
103 >        self.parent = {}          # DBS output: parents of each file
104 >        self.lumis = {}           # DBS output: lumis in each file
105 >        self.lumiMask = None
106  
55 # ####################################
107      def fetchDBSInfo(self):
108          """
109          Contact DBS
110          """
111 <
112 <        ## add the PU among the required data tiers if the Digi are requested
113 <        if (self.dataTiers.count('Digi')>0) & (self.dataTiers.count('PU')<=0) :
114 <          self.dataTiers.append('PU')
115 <
116 <        ## get info about the requested dataset
117 <        dbs=DBSInfo()
118 <        self.datasets = dbs.getMatchingDatasets(self.owner, self.dataset)
119 <        if len(self.datasets) == 0:
120 <          raise DataDiscoveryError("Owner=%s, Dataset=%s unknown to DBS" % (self.owner, self.dataset))
121 <        if len(self.datasets) > 1:
122 <          raise DataDiscoveryError("Owner=%s, Dataset=%s is ambiguous" % (self.owner, self.dataset))
111 >        ## get DBS URL
112 >        global_url="http://cmsdbsprod.cern.ch/cms_dbs_prod_global/servlet/DBSServlet"
113 >        caf_url = "http://cmsdbsprod.cern.ch/cms_dbs_caf_analysis_01/servlet/DBSServlet"
114 >        dbs_url_map  =   {'glite':    global_url,
115 >                          'glite_slc5':global_url,\
116 >                          'glitecoll':global_url,\
117 >                          'condor':   global_url,\
118 >                          'condor_g': global_url,\
119 >                          'glidein':  global_url,\
120 >                          'lsf':      global_url,\
121 >                          'caf':      caf_url,\
122 >                          'sge':      global_url,\
123 >                          'arc':      global_url,\
124 >                          'pbs':      global_url
125 >                          }
126 >
127 >        dbs_url_default = dbs_url_map[(common.scheduler.name()).lower()]
128 >        dbs_url=  self.cfg_params.get('CMSSW.dbs_url', dbs_url_default)
129 >        common.logger.info("Accessing DBS at: "+dbs_url)
130 >
131 >        ## check if runs are selected
132 >        runselection = []
133 >        if (self.cfg_params.has_key('CMSSW.runselection')):
134 >            runselection = parseRange2(self.cfg_params['CMSSW.runselection'])
135 >
136 >        ## check if various lumi parameters are set
137 >        self.lumiMask = self.cfg_params.get('CMSSW.lumi_mask',None)
138 >        self.lumiParams = self.cfg_params.get('CMSSW.total_number_of_lumis',None) or \
139 >                          self.cfg_params.get('CMSSW.lumis_per_job',None)
140 >
141 >        lumiList = None
142 >        if self.lumiMask:
143 >            lumiList = LumiList(filename=self.lumiMask)
144 >
145 >        self.splitByRun = int(self.cfg_params.get('CMSSW.split_by_run', 0))
146 >
147 >        common.logger.log(10-1,"runselection is: %s"%runselection)
148 >        ## service API
149 >        args = {}
150 >        args['url']     = dbs_url
151 >        args['level']   = 'CRITICAL'
152 >
153 >        ## check whether use of the parent info has been requested
154 >        useparent = int(self.cfg_params.get('CMSSW.use_parent',0))
155 >
156 >        ## check whether a non-default file was requested to store/read analyzed fileBlocks
157 >        defaultName = common.work_space.shareDir()+'AnalyzedBlocks.txt'
158 >        fileBlocks_FileName = os.path.abspath(self.cfg_params.get('CMSSW.fileblocks_file',defaultName))
159 >
160 >        api = DBSAPI.dbsApi.DbsApi(args)
161 >        self.files = self.queryDbs(api,path=self.datasetPath,runselection=runselection,useParent=useparent)
162 >
163 >        anFileBlocks = []
164 >        if self.skipBlocks: anFileBlocks = readTXTfile(self, fileBlocks_FileName)
165 >
166 >        # parse files and fill arrays
167 >        for file in self.files :
168 >            parList  = []
169 >            fileLumis = [] # List of tuples
170 >            # skip already analyzed blocks
171 >            fileblock = file['Block']['Name']
172 >            if fileblock not in anFileBlocks :
173 >                filename = file['LogicalFileName']
174 >                # if requested, retrieve the list of parents of the given child
175 >                if useparent==1:
176 >                    parList = [x['LogicalFileName'] for x in file['ParentList']]
177 >                if self.ads or self.lumiMask or self.lumiParams:
178 >                    fileLumis = [ (x['RunNumber'], x['LumiSectionNumber'])
179 >                                 for x in file['LumiList'] ]
180 >                self.parent[filename] = parList
181 >                # For LumiMask, intersection of two lists.
182 >                if self.lumiMask:
183 >                    self.lumis[filename] = lumiList.filterLumis(fileLumis)
184 >                else:
185 >                    self.lumis[filename] = fileLumis
186 >                if filename.find('.dat') < 0 :
187 >                    events    = file['NumberOfEvents']
188 >                    # Count number of events and lumis per block
189 >                    if fileblock in self.eventsPerBlock.keys() :
190 >                        self.eventsPerBlock[fileblock] += events
191 >                    else :
192 >                        self.eventsPerBlock[fileblock] = events
193 >                    # Number of events per file
194 >                    self.eventsPerFile[filename] = events
195 >
196 >                    # List of files per block
197 >                    if fileblock in self.blocksinfo.keys() :
198 >                        self.blocksinfo[fileblock].append(filename)
199 >                    else :
200 >                        self.blocksinfo[fileblock] = [filename]
201 >
202 >                    # total number of events
203 >                    self.maxEvents += events
204 >                    self.maxLumis  += len(self.lumis[filename])
205 >
206 >        if  self.skipBlocks and len(self.eventsPerBlock.keys()) == 0:
207 >            msg = "No new fileblocks available for dataset: "+str(self.datasetPath)
208 >            raise  CrabException(msg)
209 >
210 >        saveFblocks=''
211 >        for block in self.eventsPerBlock.keys() :
212 >            saveFblocks += str(block)+'\n'
213 >            common.logger.log(10-1,"DBSInfo: total nevts %i in block %s "%(self.eventsPerBlock[block],block))
214 >        writeTXTfile(self, fileBlocks_FileName , saveFblocks)
215 >
216 >        if len(self.eventsPerBlock) <= 0:
217 >            raise NotExistingDatasetError(("\nNo data for %s in DBS\nPlease check"
218 >                                            + " dataset path variables in crab.cfg")
219 >                                            % self.datasetPath)
220 >
221 >
222 >    def queryDbs(self,api,path=None,runselection=None,useParent=None):
223 >
224 >        allowedRetriveValue = ['retrive_block', 'retrive_run']
225 >        if self.ads or self.lumiMask or self.lumiParams:
226 >            allowedRetriveValue.append('retrive_lumi')
227 >        if useParent == 1: allowedRetriveValue.append('retrive_parent')
228 >        common.logger.debug("Set of input parameters used for DBS query: %s" % allowedRetriveValue)
229          try:
230 <          self.dbsdataset = self.datasets[0].getDatasetPath()
231 <          self.blocksinfo = dbs.getDatasetContents(self.dbsdataset)
232 <          self.allblocks.append (self.blocksinfo.keys ()) # add also the current fileblocksinfo
233 <          self.dbspaths.append(self.dbsdataset)
234 <        except DBSError, ex:
235 <          raise DataDiscoveryError(ex.getErrorMessage())
236 <        
237 <        if len(self.blocksinfo)<=0:
238 <         msg="\nERROR Data for %s do not exist in DBS! \n Check the dataset/owner variables in crab.cfg !"%self.dbsdataset
239 <         raise NotExistingDatasetError(msg)
230 >            if len(runselection) <=0 :
231 >                if useParent==1 or self.splitByRun==1 or self.ads or self.lumiMask or self.lumiParams:
232 >                    if self.ads:
233 >                        files = api.listFiles(analysisDataset=path, retriveList=allowedRetriveValue)
234 >                    else :
235 >                        files = api.listFiles(path=path, retriveList=allowedRetriveValue)
236 >                else:
237 >                    files = api.listDatasetFiles(self.datasetPath)
238 >            else :
239 >                files=[]
240 >                for arun in runselection:
241 >                    try:
242 >                        if self.ads:
243 >                            filesinrun = api.listFiles(analysisDataset=path,retriveList=allowedRetriveValue,runNumber=arun)
244 >                        else:
245 >                            filesinrun = api.listFiles(path=path,retriveList=allowedRetriveValue,runNumber=arun)
246 >                        files.extend(filesinrun)
247 >                    except:
248 >                        msg="WARNING: problem extracting info from DBS for run %s "%arun
249 >                        common.logger.info(msg)
250 >                        pass
251 >
252 >        except DbsBadRequest, msg:
253 >            raise DataDiscoveryError(msg)
254 >        except DBSError, msg:
255 >            raise DataDiscoveryError(msg)
256  
257 +        return files
258  
85        ## get info about the parents
86        try:
87          parents=dbs.getDatasetProvenance(self.dbsdataset, self.dataTiers)
88        except DBSInvalidDataTierError, ex:
89          msg=ex.getErrorMessage()+' \n Check the data_tier variable in crab.cfg !\n'
90          raise DataDiscoveryError(msg)
91        except DBSError, ex:
92          raise DataDiscoveryError(ex.getErrorMessage())
259  
260 <        ## check that the user asks for parent Data Tier really existing in the DBS provenance
261 <        self.checkParentDataTier(parents, self.dataTiers)
260 >    def getMaxEvents(self):
261 >        """
262 >        Return the total number of events in the dataset
263 >        """
264 >        return self.maxEvents
265  
97        ## for each parent get the corresponding fileblocks
98        try:
99          for p in parents:
100            ## fill a list of dbspaths
101            parentPath = p.getDatasetPath()
102            self.dbspaths.append (parentPath)
103            parentBlocks = dbs.getDatasetContents (parentPath)
104            self.allblocks.append (parentBlocks.keys ())  # add parent fileblocksinfo
105        except DBSError, ex:
106            raise DataDiscoveryError(ex.getErrorMessage())
107
108 # #################################################
109    def checkParentDataTier(self, parents, dataTiers):
110        """
111         check that the data tiers requested by the user really exists in the provenance of the given dataset
112        """
113        startType = string.split(self.dbsdataset,'/')[2]
114        parentTypes = map(lambda p: p.getDataType(), parents)
115        for tier in dataTiers:
116          if parentTypes.count(tier) <= 0 and tier != startType:
117            msg="\nERROR Data %s not published in DBS with asked data tiers : the data tier not found is %s !\n  Check the data_tier variable in crab.cfg !"%(self.dbsdataset,tier)
118            raise  NoDataTierinProvenanceError(msg)
266  
267 +    def getMaxLumis(self):
268 +        """
269 +        Return the number of lumis in the dataset
270 +        """
271 +        return self.maxLumis
272  
273 < # #################################################
274 <    def getMaxEvents(self):
273 >
274 >    def getEventsPerBlock(self):
275          """
276 <         max events of the primary dataset-owner
276 >        return the number of events in each fileblock
277          """
278 <        ## loop over the fileblocks of the primary dataset-owner
127 <        nevts=0      
128 <        for blockevts in self.blocksinfo.values():
129 <          nevts=nevts+blockevts
278 >        return self.eventsPerBlock
279  
131        return nevts
280  
281 < # #################################################
134 <    def getDBSPaths(self):
281 >    def getEventsPerFile(self):
282          """
283 <         list the DBSpaths for all required data
283 >        return the number of events in each file
284          """
285 <        return self.dbspaths
285 >        return self.eventsPerFile
286 >
287  
288 < # #################################################
141 <    def getEVC(self):
288 >    def getFiles(self):
289          """
290 <         list the event collections structure by fileblock
290 >        return files grouped by fileblock
291          """
292 <        print "To be used by a more complex job splitting... TODO later... "
146 <        print "it requires changes in what's returned by DBSInfo.getDatasetContents and then fetchDBSInfo"
292 >        return self.blocksinfo
293  
294 < # #################################################
295 <    def getFileBlocks(self):
294 >
295 >    def getParent(self):
296          """
297 <         fileblocks for all required dataset-owners
297 >        return parents grouped by file
298          """
299 <        return self.allblocks        
299 >        return self.parent
300  
301 < ########################################################################
301 >
302 >    def getLumis(self):
303 >        """
304 >        return lumi sections grouped by file
305 >        """
306 >        return self.lumis
307  
308  
309 +    def getListFiles(self):
310 +        """
311 +        return the list of files returned by the DBS query
312 +        """
313 +        return self.files

Diff Legend

Removed lines
+ Added lines
< Changed lines
> Changed lines
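
Usage sketch (not part of either revision): the snippet below shows how the revision 1.42 DataDiscovery class is typically driven. It is only a sketch: it assumes a fully initialised CRAB runtime (the common module must already provide scheduler, logger and work_space, since fetchDBSInfo uses all three), and the dataset path and run range are hypothetical values chosen purely for illustration.

    # Python 2, matching the CRAB codebase of this era
    from DataDiscovery import DataDiscovery, DataDiscoveryError

    cfg_params = {
        # keys read by fetchDBSInfo(); all of them are optional
        'CMSSW.dbs_url':      'http://cmsdbsprod.cern.ch/cms_dbs_prod_global/servlet/DBSServlet',
        'CMSSW.runselection': '146240-146250',   # hypothetical run range
        'CMSSW.use_parent':   '0',
    }

    try:
        # hypothetical dataset path; skipAnBlocks=False means previously
        # analysed fileblocks are not filtered out
        discovery = DataDiscovery('/MinimumBias/Run2010A-Dec22ReReco_v1/RECO',
                                  cfg_params, skipAnBlocks=False)
        discovery.fetchDBSInfo()
        print "total events :", discovery.getMaxEvents()
        print "fileblocks   :", len(discovery.getEventsPerBlock())
    except DataDiscoveryError, ex:
        print "DBS lookup failed:", ex.getErrorMessage()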