
Comparing COMP/CRAB/python/DataDiscovery.py (file contents):
Revision 1.7 by slacapra, Thu Jul 6 10:22:47 2006 UTC vs.
Revision 1.32 by ewv, Wed Jul 29 21:41:50 2009 UTC

# Line 1 | Line 1
1   #!/usr/bin/env python
2 < import sys, os, string, re
3 < from DBSInfo import *
2 > import exceptions
3 > import DBSAPI.dbsApi
4 > from DBSAPI.dbsApiException import *
5 > import common
6 > from crab_util import *
7 > import os
8 >
9 >
10 >
11 > class DBSError(exceptions.Exception):
12 >    def __init__(self, errorName, errorMessage):
13 >        args='\nERROR DBS %s : %s \n'%(errorName,errorMessage)
14 >        exceptions.Exception.__init__(self, args)
15 >        pass
16 >
17 >    def getErrorMessage(self):
18 >        """ Return error message """
19 >        return "%s" % (self.args)
20 >
21 >
22 >
23 > class DBSInvalidDataTierError(exceptions.Exception):
24 >    def __init__(self, errorName, errorMessage):
25 >        args='\nERROR DBS %s : %s \n'%(errorName,errorMessage)
26 >        exceptions.Exception.__init__(self, args)
27 >        pass
28 >
29 >    def getErrorMessage(self):
30 >        """ Return error message """
31 >        return "%s" % (self.args)
32 >
33 >
34 >
35 > class DBSInfoError:
36 >    def __init__(self, url):
37 >        print '\nERROR accessing DBS url : '+url+'\n'
38 >        pass
39 >
40  
41  
6 # ####################################
42   class DataDiscoveryError(exceptions.Exception):
43      def __init__(self, errorMessage):
44 <        args=errorMessage
45 <        exceptions.Exception.__init__(self, args)
44 >        self.args=errorMessage
45 >        exceptions.Exception.__init__(self, self.args)
46          pass
47  
48      def getErrorMessage(self):
49          """ Return exception error """
50          return "%s" % (self.args)
51  
52 < # ####################################
52 >
53 >
54   class NotExistingDatasetError(exceptions.Exception):
55      def __init__(self, errorMessage):
56 <        args=errorMessage
57 <        exceptions.Exception.__init__(self, args)
56 >        self.args=errorMessage
57 >        exceptions.Exception.__init__(self, self.args)
58          pass
59  
60      def getErrorMessage(self):
61          """ Return exception error """
62          return "%s" % (self.args)
63  
64 < # ####################################
64 >
65 >
66   class NoDataTierinProvenanceError(exceptions.Exception):
67      def __init__(self, errorMessage):
68 <        args=errorMessage
69 <        exceptions.Exception.__init__(self, args)
68 >        self.args=errorMessage
69 >        exceptions.Exception.__init__(self, self.args)
70          pass
71  
72      def getErrorMessage(self):
73          """ Return exception error """
74          return "%s" % (self.args)
75  
39 # ####################################
40 # class to find and extract info from published data
41 class DataDiscovery:
42    def __init__(self, owner, dataset, dataTiers, cfg_params):
76  
77 < #       Attributes
78 <        self.owner = owner
79 <        self.dataset = dataset
80 <        self.dataTiers = dataTiers
77 >
78 > class DataDiscovery:
79 >    """
80 >    Class to find and extract info from published data
81 >    """
82 >    def __init__(self, datasetPath, cfg_params, skipAnBlocks):
83 >
84 >        #       Attributes
85 >        self.datasetPath = datasetPath
86 >        # Analysis dataset is primary/processed/tier/definition
87 >        self.ads = len(self.datasetPath.split("/")) > 3
88          self.cfg_params = cfg_params
89 +        self.skipBlocks = skipAnBlocks
90 +
91 +        self.eventsPerBlock = {}  # DBS output: map fileblocks-events for collection
92 +        self.eventsPerFile = {}   # DBS output: map files-events
93 + #         self.lumisPerBlock = {}   # DBS output: number of lumis in each block
94 + #         self.lumisPerFile = {}    # DBS output: number of lumis in each file
95 +        self.blocksinfo = {}      # DBS output: map fileblocks-files
96 +        self.maxEvents = 0        # DBS output: max events
97 +        self.maxLumis = 0         # DBS output: total number of lumis
98 +        self.parent = {}          # DBS output: parents of each file
99 +        self.lumis = {}           # DBS output: lumis in each file
100  
50        self.dbspaths= []     # DBS output: list of dbspaths for all data
51        self.allblocks = []   # DBS output: list of map fileblocks-totevts for all dataset-owners
52        self.blocksinfo = {}  # DBS output: map fileblocks-totevts for the primary block, used internally to this class
53 #DBS output: max events computed by method getMaxEvents
101  
55 # ####################################
102      def fetchDBSInfo(self):
103          """
104          Contact DBS
105          """
106 +        ## get DBS URL
107 +        global_url="http://cmsdbsprod.cern.ch/cms_dbs_prod_global/servlet/DBSServlet"
108 +        caf_url = "http://cmsdbsprod.cern.ch/cms_dbs_caf_analysis_01/servlet/DBSServlet"
109 +        dbs_url_map  =   {'glite':    global_url,
110 +                          'glitecoll':global_url,\
111 +                          'condor':   global_url,\
112 +                          'condor_g': global_url,\
113 +                          'glidein':  global_url,\
114 +                          'lsf':      global_url,\
115 +                          'caf':      caf_url,\
116 +                          'sge':      global_url,
117 +                          'arc':      global_url
118 +                          }
119 +
120 +        dbs_url_default = dbs_url_map[(common.scheduler.name()).lower()]
121 +        dbs_url=  self.cfg_params.get('CMSSW.dbs_url', dbs_url_default)
122 +        common.logger.debug("Accessing DBS at: "+dbs_url)
123 +
124 +        ## check if runs are selected
125 +        runselection = []
126 +        if (self.cfg_params.has_key('CMSSW.runselection')):
127 +            runselection = parseRange2(self.cfg_params['CMSSW.runselection'])
128 +
129 +
130 +        self.splitByRun = int(self.cfg_params.get('CMSSW.split_by_run', 0))
131 +
132 +        common.logger.log(10-1,"runselection is: %s"%runselection)
133 +        ## service API
134 +        args = {}
135 +        args['url']     = dbs_url
136 +        args['level']   = 'CRITICAL'
137 +
138 +        ## check whether use of the parent info has been requested
139 +        useparent = int(self.cfg_params.get('CMSSW.use_parent',0))
140 +
141 +        ## check whether a non-default file to store/read analyzed fileBlocks has been requested
142 +        defaultName = common.work_space.shareDir()+'AnalyzedBlocks.txt'
143 +        fileBlocks_FileName = os.path.abspath(self.cfg_params.get('CMSSW.fileblocks_file',defaultName))
144 +
145 +        api = DBSAPI.dbsApi.DbsApi(args)
146 +        self.files = self.queryDbs(api,path=self.datasetPath,runselection=runselection,useParent=useparent)
147 +
148 +        anFileBlocks = []
149 +        if self.skipBlocks: anFileBlocks = readTXTfile(self, fileBlocks_FileName)
150 +
151 +        # parse files and fill arrays
152 +        for file in self.files :
153 +            parList  = []
154 +            lumiList = [] # List of tuples
155 +            # skip already analyzed blocks
156 +            fileblock = file['Block']['Name']
157 +            if fileblock not in anFileBlocks :
158 +                filename = file['LogicalFileName']
159 +                # if requested, retrieve the list of parents for the given child
160 +                if useparent==1:
161 +                    parList = [x['LogicalFileName'] for x in file['ParentList']]
162 +                if self.ads:
163 +                    lumiList = [ (x['RunNumber'], x['LumiSectionNumber'])
164 +                                 for x in file['LumiList'] ]
165 +                self.parent[filename] = parList
166 +                self.lumis[filename] = lumiList
167 +                if filename.find('.dat') < 0 :
168 +                    events    = file['NumberOfEvents']
169 +                    # Count number of events per block
170 +                    if fileblock in self.eventsPerBlock.keys() :
171 +                        self.eventsPerBlock[fileblock] += events
172 +                    else :
173 +                        self.eventsPerBlock[fileblock] = events
174 +                    # Number of events per file
175 +                    self.eventsPerFile[filename] = events
176 +
177 +                    # List of files per block
178 +                    if fileblock in self.blocksinfo.keys() :
179 +                        self.blocksinfo[fileblock].append(filename)
180 +                    else :
181 +                        self.blocksinfo[fileblock] = [filename]
182 +
183 +                    # total number of events
184 +                    self.maxEvents += events
185 +                    self.maxLumis  += len(lumiList)
186 +
187 +        if  self.skipBlocks and len(self.eventsPerBlock.keys()) == 0:
188 +            msg = "No new fileblocks available for dataset: "+str(self.datasetPath)
189 +            raise  CrabException(msg)
190 +
191 +        saveFblocks=''
192 +        for block in self.eventsPerBlock.keys() :
193 +            saveFblocks += str(block)+'\n'
194 +            common.logger.log(10-1,"DBSInfo: total nevts %i in block %s "%(self.eventsPerBlock[block],block))
195 +        writeTXTfile(self, fileBlocks_FileName , saveFblocks)
196 +
197 +        if len(self.eventsPerBlock) <= 0:
198 +            raise NotExistingDatasetError(("\nNo data for %s in DBS\nPlease check"
199 +                                            + " dataset path variables in crab.cfg")
200 +                                            % self.datasetPath)
201 +
202 +
203 +    def queryDbs(self,api,path=None,runselection=None,useParent=None):
204 +
205 +        allowedRetriveValue = ['retrive_block', 'retrive_run']
206 +        if self.ads: allowedRetriveValue.append('retrive_lumi')
207 +        if useParent == 1: allowedRetriveValue.append('retrive_parent')
208 +        common.logger.debug("Set of input parameters used for DBS query: %s" % allowedRetriveValue)
209 +        try:
210 +            if len(runselection) <=0 :
211 +                if useParent==1 or self.splitByRun==1 :
212 +                    if self.ads:
213 +                        files = api.listFiles(analysisDataset=path, retriveList=allowedRetriveValue)
214 +                    else :
215 +                        files = api.listFiles(path=path, retriveList=allowedRetriveValue)
216 +                else:
217 +                    files = api.listDatasetFiles(self.datasetPath)
218 +            else :
219 +                files=[]
220 +                for arun in runselection:
221 +                    try:
222 +                        if self.ads:
223 +                            filesinrun = api.listFiles(analysisDataset=path,retriveList=allowedRetriveValue,runNumber=arun)
224 +                        else:
225 +                            filesinrun = api.listFiles(path=path,retriveList=allowedRetriveValue,runNumber=arun)
226 +                        files.extend(filesinrun)
227 +                    except:
228 +                        msg="WARNING: problem extracting info from DBS for run %s "%arun
229 +                        common.logger.info(msg)
230 +                        pass
231  
232 <        ## add the PU among the required data tiers if the Digi are requested
233 <        if (self.dataTiers.count('Digi')>0) & (self.dataTiers.count('PU')<=0) :
234 <            self.dataTiers.append('PU')
232 >        except DbsBadRequest, msg:
233 >            raise DataDiscoveryError(msg)
234 >        except DBSError, msg:
235 >            raise DataDiscoveryError(msg)
236  
237 <        ## get info about the requested dataset
66 <        dbs=DBSInfo()
67 <        try:
68 <            self.datasets = dbs.getMatchingDatasets(self.owner, self.dataset)
69 <        except DBSError, ex:
70 <            raise DataDiscoveryError(ex.getErrorMessage())
71 <        if len(self.datasets) == 0:
72 <            raise DataDiscoveryError("Owner=%s, Dataset=%s unknown to DBS" % (self.owner, self.dataset))
73 <        if len(self.datasets) > 1:
74 <            raise DataDiscoveryError("Owner=%s, Dataset=%s is ambiguous" % (self.owner, self.dataset))
75 <        try:
76 <            self.dbsdataset = self.datasets[0].get('datasetPathName')
77 <            self.blocksinfo = dbs.getDatasetContents(self.dbsdataset)
78 <            self.allblocks.append (self.blocksinfo.keys ()) # add also the current fileblocksinfo
79 <            self.dbspaths.append(self.dbsdataset)
80 <        except DBSError, ex:
81 <            raise DataDiscoveryError(ex.getErrorMessage())
82 <        
83 <        if len(self.blocksinfo)<=0:
84 <            msg="\nERROR Data for %s do not exist in DBS! \n Check the dataset/owner variables in crab.cfg !"%self.dbsdataset
85 <            raise NotExistingDatasetError(msg)
237 >        return files
238  
239  
240 <        ## get info about the parents
241 <        try:
242 <            parents=dbs.getDatasetProvenance(self.dbsdataset, self.dataTiers)
243 <        except DBSInvalidDataTierError, ex:
244 <            msg=ex.getErrorMessage()+' \n Check the data_tier variable in crab.cfg !\n'
93 <            raise DataDiscoveryError(msg)
94 <        except DBSError, ex:
95 <            raise DataDiscoveryError(ex.getErrorMessage())
240 >    def getMaxEvents(self):
241 >        """
242 >        max events
243 >        """
244 >        return self.maxEvents
245  
97        ## check that the user asks for parent Data Tier really existing in the DBS provenance
98        self.checkParentDataTier(parents, self.dataTiers)
246  
247 <        ## for each parent get the corresponding fileblocks
248 <        try:
249 <            for p in parents:
250 <                ## fill a list of dbspaths
251 <                parentPath = p.get('parent').get('datasetPathName')
105 <                self.dbspaths.append (parentPath)
106 <                parentBlocks = dbs.getDatasetContents (parentPath)
107 <                self.allblocks.append (parentBlocks.keys ())  # add parent fileblocksinfo
108 <        except DBSError, ex:
109 <            raise DataDiscoveryError(ex.getErrorMessage())
110 <
111 < # #################################################
112 <    def checkParentDataTier(self, parents, dataTiers):
113 <        """
114 <        check that the data tiers requested by the user really exists in the provenance of the given dataset
115 <        """
116 <        startType = string.split(self.dbsdataset,'/')[2]
117 <        # for example 'type' is PU and 'dataTier' is Hit
118 <        parentTypes = map(lambda p: p.get('type'), parents)
119 <        for tier in dataTiers:
120 <            if parentTypes.count(tier) <= 0 and tier != startType:
121 <                msg="\nERROR Data %s not published in DBS with asked data tiers : the data tier not found is %s !\n  Check the data_tier variable in crab.cfg !"%(self.dbsdataset,tier)
122 <                raise  NoDataTierinProvenanceError(msg)
247 >    def getEventsPerBlock(self):
248 >        """
249 >        list the event collections structure by fileblock
250 >        """
251 >        return self.eventsPerBlock
252  
253  
254 < # #################################################
126 <    def getMaxEvents(self):
254 >    def getEventsPerFile(self):
255          """
256 <        max events of the primary dataset-owner
256 >        list the event collections structure by file
257          """
258 <        ## loop over the fileblocks of the primary dataset-owner
131 <        nevts=0      
132 <        for blockevts in self.blocksinfo.values():
133 <            nevts=nevts+blockevts
258 >        return self.eventsPerFile
259  
135        return nevts
260  
261 < # #################################################
138 <    def getDBSPaths(self):
261 >    def getFiles(self):
262          """
263 <        list the DBSpaths for all required data
263 >        return files grouped by fileblock
264          """
265 <        return self.dbspaths
265 >        return self.blocksinfo
266 >
267  
268 < # #################################################
145 <    def getEVC(self):
268 >    def getParent(self):
269          """
270 <        list the event collections structure by fileblock
270 >        return parent grouped by file
271          """
272 <        print "To be used by a more complex job splitting... TODO later... "
273 <        print "it requires changes in what's returned by DBSInfo.getDatasetContents and then fetchDBSInfo"
272 >        return self.parent
273 >
274  
275 < # #################################################
153 <    def getFileBlocks(self):
275 >    def getLumis(self):
276          """
277 <        fileblocks for all required dataset-owners
277 >        return lumi sections grouped by file
278          """
279 <        return self.allblocks        
279 >        return self.lumis
280  
281 < ########################################################################
281 >
282 >    def getListFiles(self):
283 >        """
284 >        return the full list of files returned by DBS
285 >        """
286 >        return self.files

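For orientation, below is a minimal usage sketch of the revised DataDiscovery class as it appears in revision 1.32. It is an illustration only: the dataset path, the run range, and the cfg_params values are hypothetical, and fetchDBSInfo() relies on a configured CRAB runtime (common.scheduler, common.logger, common.work_space and the helpers from crab_util), so the snippet will not run outside a CRAB session.

# Hypothetical usage sketch (Python 2, as in CRAB); assumes a configured CRAB session.
from DataDiscovery import DataDiscovery, NotExistingDatasetError

cfg_params = {
    'CMSSW.runselection' : '100000-100010',   # optional: restrict the query to a run range
    'CMSSW.use_parent'   : 0,                 # 1 would also fetch the parent files
    # 'CMSSW.dbs_url'    : 'http://cmsdbsprod.cern.ch/cms_dbs_prod_global/servlet/DBSServlet',  # optional override
}

try:
    # third argument: whether to skip fileblocks already listed in AnalyzedBlocks.txt
    discovery = DataDiscovery('/PrimaryDS/ProcessedDS/RECO', cfg_params, 0)
    discovery.fetchDBSInfo()                  # contacts DBS and fills the internal maps
    print "Total events :", discovery.getMaxEvents()
    print "Fileblocks   :", discovery.getEventsPerBlock().keys()
    print "Files/block  :", discovery.getFiles()
except NotExistingDatasetError, ex:
    print ex.getErrorMessage()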
Diff Legend

Removed lines (old revision only, shown with old line numbers and no marker)
+ Added lines
< Changed lines (old revision)
> Changed lines (new revision)