
Comparing COMP/CRAB/python/DataDiscovery.py (file contents):
Revision 1.1 by afanfani, Mon Nov 7 23:17:44 2005 UTC vs.
Revision 1.36 by spiga, Thu Nov 19 18:04:20 2009 UTC

# Line 1 | Line 1
1 < #!/usr/bin/env python2
2 < import sys, os, string, re
3 < from DBSInfo import *
1 > #!/usr/bin/env python
2  
3 < # ####################################
4 < class DataDiscoveryError:
5 <    def __init__(self):
6 <        print '\nERROR accessing Data Discovery\n'
3 > __revision__ = "$Id$"
4 > __version__ = "$Revision$"
5 >
6 > import exceptions
7 > import DBSAPI.dbsApi
8 > from DBSAPI.dbsApiException import *
9 > import common
10 > from crab_util import *
11 > import os
12 >
13 >
14 >
15 > class DBSError(exceptions.Exception):
16 >    def __init__(self, errorName, errorMessage):
17 >        args='\nERROR DBS %s : %s \n'%(errorName,errorMessage)
18 >        exceptions.Exception.__init__(self, args)
19          pass
20 < # ####################################
21 < class DatasetContentsError:
22 <    def __init__(self):
23 <        print '\nERROR accessing Data Discovery : getDatasetContents\n'
20 >
21 >    def getErrorMessage(self):
22 >        """ Return error message """
23 >        return "%s" % (self.args)
24 >
25 >
26 >
27 > class DBSInvalidDataTierError(exceptions.Exception):
28 >    def __init__(self, errorName, errorMessage):
29 >        args='\nERROR DBS %s : %s \n'%(errorName,errorMessage)
30 >        exceptions.Exception.__init__(self, args)
31          pass
32  
33 < # ####################################
34 < class DatasetProvenanceError:
35 <    def __init__(self):
36 <        print '\nERROR accessing Data Discovery : getDatasetProvenance\n'
33 >    def getErrorMessage(self):
34 >        """ Return error message """
35 >        return "%s" % (self.args)
36 >
37 >
38 >
39 > class DBSInfoError:
40 >    def __init__(self, url):
41 >        print '\nERROR accessing DBS url : '+url+'\n'
42          pass
43  
22 # ####################################
23 # class to find and extract info from published data
24 class DataDiscovery:
25    def __init__(self, owner, dataset, dataTiers, cfg_params):
44  
45 < #       Attributes
46 <        self.dbsdataset=dataset+'/datatier/'+owner
47 <        self.dataTiers = dataTiers
45 >
46 > class DataDiscoveryError(exceptions.Exception):
47 >    def __init__(self, errorMessage):
48 >        self.args=errorMessage
49 >        exceptions.Exception.__init__(self, self.args)
50 >        pass
51 >
52 >    def getErrorMessage(self):
53 >        """ Return exception error """
54 >        return "%s" % (self.args)
55 >
56 >
57 >
58 > class NotExistingDatasetError(exceptions.Exception):
59 >    def __init__(self, errorMessage):
60 >        self.args=errorMessage
61 >        exceptions.Exception.__init__(self, self.args)
62 >        pass
63 >
64 >    def getErrorMessage(self):
65 >        """ Return exception error """
66 >        return "%s" % (self.args)
67 >
68 >
69 >
70 > class NoDataTierinProvenanceError(exceptions.Exception):
71 >    def __init__(self, errorMessage):
72 >        self.args=errorMessage
73 >        exceptions.Exception.__init__(self, self.args)
74 >        pass
75 >
76 >    def getErrorMessage(self):
77 >        """ Return exception error """
78 >        return "%s" % (self.args)
79 >
80 >
81 >
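
A minimal sketch of how a caller might trap these exception classes (hypothetical caller code; discovery stands for an instance of the DataDiscovery class defined just below):

    # Hedged sketch: the real call sites live elsewhere in CRAB.
    try:
        discovery.fetchDBSInfo()
    except NotExistingDatasetError, ex:
        print ex.getErrorMessage()        # dataset not published in DBS
    except NoDataTierinProvenanceError, ex:
        print ex.getErrorMessage()        # requested tier missing in provenance
    except DataDiscoveryError, ex:
        print ex.getErrorMessage()        # any other DBS access failure
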
82 > class DataDiscovery:
83 >    """
 84 >    Class to find and extract info from published data
85 >    """
86 >    def __init__(self, datasetPath, cfg_params, skipAnBlocks):
87 >
88 >        #       Attributes
89 >        self.datasetPath = datasetPath
90 >        # Analysis dataset is primary/processed/tier/definition
91 >        self.ads = len(self.datasetPath.split("/")) > 4
92          self.cfg_params = cfg_params
93 +        self.skipBlocks = skipAnBlocks
94 +
95 +        self.eventsPerBlock = {}  # DBS output: map fileblocks-events for collection
96 +        self.eventsPerFile = {}   # DBS output: map files-events
97 + #         self.lumisPerBlock = {}   # DBS output: number of lumis in each block
98 + #         self.lumisPerFile = {}    # DBS output: number of lumis in each file
99 +        self.blocksinfo = {}      # DBS output: map fileblocks-files
100 +        self.maxEvents = 0        # DBS output: max events
101 +        self.maxLumis = 0         # DBS output: total number of lumis
102 +        self.parent = {}          # DBS output: parents of each file
103 +        self.lumis = {}           # DBS output: lumis in each file
104  
32        self.dbspaths= []  # DBS output: list of dbspaths for all data
33        self.allblocks = []   # DBS output: list of map fileblocks-totevts for all dataset-owners
34        self.blocksinfo = {}     # DBS output: map fileblocks-totevts for the primary block, used internally to this class
35 #DBS output: max events computed by method getMaxEvents
105  
37 # ####################################
106      def fetchDBSInfo(self):
107          """
108          Contact DBS
109          """
110 <        parents = []
111 <        parentsblocksinfo = {}
112 <        self.dbspaths.append("/"+self.dbsdataset) # add the primary dbspath
113 <                                                  # it might be replaced if one gets the primary dbspath from DBSAPI as well
114 <
115 <        dbs=DBSInfo(self.dbsdataset,self.dataTiers)
116 <        try:
117 <          self.blocksinfo=dbs.getDatasetContents()
118 <        except dbs.DBSError:
119 <          raise DataDiscoveryError
110 >        ## get DBS URL
111 >        global_url="http://cmsdbsprod.cern.ch/cms_dbs_prod_global/servlet/DBSServlet"
112 >        caf_url = "http://cmsdbsprod.cern.ch/cms_dbs_caf_analysis_01/servlet/DBSServlet"
113 >        dbs_url_map  =   {'glite':    global_url,
114 >                          'glitecoll':global_url,\
115 >                          'condor':   global_url,\
116 >                          'condor_g': global_url,\
117 >                          'glidein':  global_url,\
118 >                          'lsf':      global_url,\
119 >                          'caf':      caf_url,\
120 >                          'sge':      global_url,\
121 >                          'arc':      global_url,\
122 >                          'pbs':      global_url
123 >                          }
124 >
125 >        dbs_url_default = dbs_url_map[(common.scheduler.name()).lower()]
126 >        dbs_url=  self.cfg_params.get('CMSSW.dbs_url', dbs_url_default)
127 >        common.logger.info("Accessing DBS at: "+dbs_url)
128 >
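
The lookup just above lets a crab.cfg setting override the scheduler-based default URL; an illustrative fragment (section and key inferred from the 'CMSSW.dbs_url' parameter name used here, not taken from CRAB documentation):

    [CMSSW]
    # illustrative only: point CRAB at the CAF analysis DBS instance
    dbs_url = http://cmsdbsprod.cern.ch/cms_dbs_caf_analysis_01/servlet/DBSServlet
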
129 >        ## check if runs are selected
130 >        runselection = []
131 >        if (self.cfg_params.has_key('CMSSW.runselection')):
132 >            runselection = parseRange2(self.cfg_params['CMSSW.runselection'])
133 >
134 >
135 >        self.splitByRun = int(self.cfg_params.get('CMSSW.split_by_run', 0))
136 >
137 >        common.logger.log(10-1,"runselection is: %s"%runselection)
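
parseRange2 is imported from crab_util; from its use here it evidently expands a run-selection string into an explicit list of runs. A hedged illustration (the exact element type it returns is an assumption):

    # Assumed behaviour of crab_util.parseRange2 (illustration only):
    # a crab.cfg entry such as
    #     runselection = 146240,146430-146432
    # would presumably expand to
    runselection = [146240, 146430, 146431, 146432]
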
138 >        ## service API
139 >        args = {}
140 >        args['url']     = dbs_url
141 >        args['level']   = 'CRITICAL'
142 >
143 >        ## check whether use of the parent info has been requested
144 >        useparent = int(self.cfg_params.get('CMSSW.use_parent',0))
145 >
146 >        ## check whether a non-default file to store/read analyzed fileBlocks has been requested
147 >        defaultName = common.work_space.shareDir()+'AnalyzedBlocks.txt'
148 >        fileBlocks_FileName = os.path.abspath(self.cfg_params.get('CMSSW.fileblocks_file',defaultName))
149 >
150 >        api = DBSAPI.dbsApi.DbsApi(args)
151 >        self.files = self.queryDbs(api,path=self.datasetPath,runselection=runselection,useParent=useparent)
152 >
153 >        anFileBlocks = []
154 >        if self.skipBlocks: anFileBlocks = readTXTfile(self, fileBlocks_FileName)
155 >
156 >        # parse files and fill arrays
157 >        for file in self.files :
158 >            parList  = []
159 >            lumiList = [] # List of tuples
160 >            # skip already analyzed blocks
161 >            fileblock = file['Block']['Name']
162 >            if fileblock not in anFileBlocks :
163 >                filename = file['LogicalFileName']
164 >                # if requested, retrieve the list of parents for the given child
165 >                if useparent==1:
166 >                    parList = [x['LogicalFileName'] for x in file['ParentList']]
167 >                if self.ads:
168 >                    lumiList = [ (x['RunNumber'], x['LumiSectionNumber'])
169 >                                 for x in file['LumiList'] ]
170 >                self.parent[filename] = parList
171 >                self.lumis[filename] = lumiList
172 >                if filename.find('.dat') < 0 :
173 >                    events    = file['NumberOfEvents']
174 >                    # Count number of events and lumis per block
175 >                    if fileblock in self.eventsPerBlock.keys() :
176 >                        self.eventsPerBlock[fileblock] += events
177 >                    else :
178 >                        self.eventsPerBlock[fileblock] = events
179 >                    # Number of events per file
180 >                    self.eventsPerFile[filename] = events
181 >
182 >                    # List of files per block
183 >                    if fileblock in self.blocksinfo.keys() :
184 >                        self.blocksinfo[fileblock].append(filename)
185 >                    else :
186 >                        self.blocksinfo[fileblock] = [filename]
187 >
188 >                    # total number of events
189 >                    self.maxEvents += events
190 >                    self.maxLumis  += len(lumiList)
191 >
192 >        if  self.skipBlocks and len(self.eventsPerBlock.keys()) == 0:
193 >            msg = "No new fileblocks available for dataset: "+str(self.datasetPath)
194 >            raise  CrabException(msg)
195 >
196 >        saveFblocks=''
197 >        for block in self.eventsPerBlock.keys() :
198 >            saveFblocks += str(block)+'\n'
199 >            common.logger.log(10-1,"DBSInfo: total nevts %i in block %s "%(self.eventsPerBlock[block],block))
200 >        writeTXTfile(self, fileBlocks_FileName , saveFblocks)
201 >
202 >        if len(self.eventsPerBlock) <= 0:
203 >            raise NotExistingDatasetError(("\nNo data for %s in DBS\nPlease check"
204 >                                            + " dataset path variables in crab.cfg")
205 >                                            % self.datasetPath)
206 >
207 >
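
When fetchDBSInfo returns, the bookkeeping maps declared in __init__ have been filled. Their shapes, as implied by the parsing loop above (all keys and values below are invented examples):

    # Illustrative contents after a successful fetchDBSInfo() (values invented):
    eventsPerBlock = {'/Prim/Proc/TIER#0123-abcd': 12000}                           # block -> total events
    eventsPerFile  = {'/store/data/Run/file.root': 1500}                            # LFN -> events
    blocksinfo     = {'/Prim/Proc/TIER#0123-abcd': ['/store/data/Run/file.root']}   # block -> LFNs
    parent         = {'/store/data/Run/file.root': ['/store/data/Run/raw.root']}    # LFN -> parent LFNs
    lumis          = {'/store/data/Run/file.root': [(146240, 12), (146240, 13)]}    # LFN -> (run, lumi)
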
208 >    def queryDbs(self,api,path=None,runselection=None,useParent=None):
209 >
210 >        allowedRetriveValue = ['retrive_block', 'retrive_run']
211 >        if self.ads: allowedRetriveValue.append('retrive_lumi')
212 >        if useParent == 1: allowedRetriveValue.append('retrive_parent')
213 >        common.logger.debug("Set of input parameters used for DBS query: %s" % allowedRetriveValue)
214          try:
215 <          parents=dbs.getDatasetProvenance()
216 <        except:
217 <          raise DataDiscoveryError
218 <
219 <        ## for each parent get the corresponding fileblocks
220 <        for aparent in parents:
221 <           ## fill the map dataset-owner for the parents
222 <           #pdataset=string.split(aparent,'/')[1]
223 <           #powner=string.split(aparent,'/')[3]
224 <           #self.dataset_owner[powner]=pdataset
225 <           ## instead of the map dataset-owner use the dbspaths  
226 <           parentdbsdataset=aparent.getDatasetPath()
227 <           self.dbspaths.append(parentdbsdataset)
228 <           #self.dbspaths.append(aparent)
229 <           ## get the fileblocks of the parents : FIXME remove the first / in the path
230 <           pdbs=DBSInfo(parentdbsdataset[1:-1],[])
231 <           try:
232 <             parentsblocksinfo=pdbs.getDatasetContents()
233 <           except:
234 <            raise DataDiscoveryError
215 >            if len(runselection) <=0 :
216 >                if useParent==1 or self.splitByRun==1 :
217 >                    if self.ads:
218 >                        files = api.listFiles(analysisDataset=path, retriveList=allowedRetriveValue)
219 >                    else :
220 >                        files = api.listFiles(path=path, retriveList=allowedRetriveValue)
221 >                else:
222 >                    files = api.listDatasetFiles(self.datasetPath)
223 >            else :
224 >                files=[]
225 >                for arun in runselection:
226 >                    try:
227 >                        if self.ads:
228 >                            filesinrun = api.listFiles(analysisDataset=path,retriveList=allowedRetriveValue,runNumber=arun)
229 >                        else:
230 >                            filesinrun = api.listFiles(path=path,retriveList=allowedRetriveValue,runNumber=arun)
231 >                        files.extend(filesinrun)
232 >                    except:
233 >                        msg="WARNING: problem extracting info from DBS for run %s "%arun
234 >                        common.logger.info(msg)
235 >                        pass
236 >
237 >        except DbsBadRequest, msg:
238 >            raise DataDiscoveryError(msg)
239 >        except DBSError, msg:
240 >            raise DataDiscoveryError(msg)
241  
242 <           self.allblocks.append(parentsblocksinfo.keys()) # add parent fileblocksinfo
242 >        return files
243  
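
Each element of the list returned above is a DBS file record (a dict). The keys consumed by the parsing loop in fetchDBSInfo are at least the following; this is a sketch with invented values, and real records carry more keys:

    # Minimal shape of one DBS file record as consumed by fetchDBSInfo (sketch):
    fileRecord = {
        'LogicalFileName': '/store/data/Run/file.root',
        'NumberOfEvents':  1500,
        'Block':           {'Name': '/Prim/Proc/TIER#0123-abcd'},
        'ParentList':      [{'LogicalFileName': '/store/data/Run/raw.root'}],  # with use_parent=1
        'LumiList':        [{'RunNumber': 146240, 'LumiSectionNumber': 12}],   # for an analysis dataset
    }
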
76        ## all the required blocks
77        self.allblocks.append(self.blocksinfo.keys()) # add also the primary fileblocksinfo
244  
79
80 # #################################################
245      def getMaxEvents(self):
246          """
247 <         max events of the primary dataset-owner
247 >        max events
248          """
249 <        ## loop over the fileblocks of the primary dataset-owner
86 <        nevts=0      
87 <        for blockevts in self.blocksinfo.values():
88 <          nevts=nevts+blockevts
249 >        return self.maxEvents
250  
90        return nevts
251  
252 < # #################################################
93 <    def getDatasetOwnerPairs(self):
252 >    def getMaxLumis(self):
253          """
254 <         list all required dataset-owner pairs
254 >        Return the number of lumis in the dataset
255          """
256 <        return self.dataset_owner
257 < # #################################################
258 <    def getDBSPaths(self):
256 >        return self.maxLumis
257 >
258 >
259 >    def getEventsPerBlock(self):
260          """
261 <         list the DBSpaths for all required data
261 >        list the event collections structure by fileblock
262          """
263 <        return self.dbspaths
263 >        return self.eventsPerBlock
264  
265 < # #################################################
266 <    def getEVC(self):
265 >
266 >    def getEventsPerFile(self):
267          """
268 <         list the event collections structure by fileblock
268 >        list the event collections structure by file
269          """
270 <        print "To be used by a more complex job splitting... TODO later... "
271 <        print "it requires changes in what's returned by DBSInfo.getDatasetContents and then fetchDBSInfo"
270 >        return self.eventsPerFile
271 >
272  
273 < # #################################################
114 <    def getFileBlocks(self):
273 >    def getFiles(self):
274          """
275 <         fileblocks for all required dataset-owners
275 >        return files grouped by fileblock
276          """
277 <        return self.allblocks        
277 >        return self.blocksinfo
278 >
279  
280 < ########################################################################
280 >    def getParent(self):
281 >        """
282 >        return parent grouped by file
283 >        """
284 >        return self.parent
285  
286  
287 +    def getLumis(self):
288 +        """
289 +        return lumi sections grouped by file
290 +        """
291 +        return self.lumis
292 +
293 +
294 +    def getListFiles(self):
295 +        """
296 +        return parent grouped by file
297 +        """
298 +        return self.files

Diff Legend

- Removed lines
+ Added lines
< Changed lines (old revision)
> Changed lines (new revision)