ViewVC Help
View File | Revision Log | Show Annotations | Root Listing
root/cvsroot/COMP/CRAB/python/DataDiscovery.py
(Generate patch)

Comparing COMP/CRAB/python/DataDiscovery.py (file contents):
Revision 1.3 by afanfani, Sun Jan 29 01:46:08 2006 UTC vs.
Revision 1.30 by spiga, Tue Mar 31 22:36:42 2009 UTC

# Line 1 | Line 1
1 < #!/usr/bin/env python2
2 < import sys, os, string, re
3 < from DBSInfo import *
4 <
1 > #!/usr/bin/env python
2 > import exceptions
3 > import DBSAPI.dbsApi
4 > from DBSAPI.dbsApiException import *
5 > import common
6 > from crab_util import *
7 > import os
8 >
9 >
10 > # #######################################
11 > class DBSError(exceptions.Exception):
12 >    def __init__(self, errorName, errorMessage):
13 >        args='\nERROR DBS %s : %s \n'%(errorName,errorMessage)
14 >        exceptions.Exception.__init__(self, args)
15 >        pass
16 >    
17 >    def getErrorMessage(self):
18 >        """ Return error message """
19 >        return "%s" % (self.args)
20 >
21 > # #######################################
22 > class DBSInvalidDataTierError(exceptions.Exception):
23 >    def __init__(self, errorName, errorMessage):
24 >        args='\nERROR DBS %s : %s \n'%(errorName,errorMessage)
25 >        exceptions.Exception.__init__(self, args)
26 >        pass
27 >    
28 >    def getErrorMessage(self):
29 >        """ Return error message """
30 >        return "%s" % (self.args)
31 >
32 > # #######################################
33 > class DBSInfoError:
34 >    def __init__(self, url):
35 >        print '\nERROR accessing DBS url : '+url+'\n'
36 >        pass
37  
38   # ####################################
39   class DataDiscoveryError(exceptions.Exception):
40 <  def __init__(self, errorMessage):
41 <   args=errorMessage
42 <   exceptions.Exception.__init__(self, args)
43 <   pass
44 <
45 <  def getErrorMessage(self):
46 <   """ Return exception error """
47 <   return "%s" % (self.args)
40 >    def __init__(self, errorMessage):
41 >        self.args=errorMessage
42 >        exceptions.Exception.__init__(self, self.args)
43 >        pass
44 >
45 >    def getErrorMessage(self):
46 >        """ Return exception error """
47 >        return "%s" % (self.args)
48  
49   # ####################################
50   class NotExistingDatasetError(exceptions.Exception):
51 <  def __init__(self, errorMessage):
52 <   args=errorMessage
53 <   exceptions.Exception.__init__(self, args)
54 <   pass
55 <
56 <  def getErrorMessage(self):
57 <   """ Return exception error """
58 <   return "%s" % (self.args)
51 >    def __init__(self, errorMessage):
52 >        self.args=errorMessage
53 >        exceptions.Exception.__init__(self, self.args)
54 >        pass
55 >
56 >    def getErrorMessage(self):
57 >        """ Return exception error """
58 >        return "%s" % (self.args)
59  
60   # ####################################
61   class NoDataTierinProvenanceError(exceptions.Exception):
62 <  def __init__(self, errorMessage):
63 <   args=errorMessage
64 <   exceptions.Exception.__init__(self, args)
65 <   pass
66 <
67 <  def getErrorMessage(self):
68 <   """ Return exception error """
69 <   return "%s" % (self.args)
62 >    def __init__(self, errorMessage):
63 >        self.args=errorMessage
64 >        exceptions.Exception.__init__(self, self.args)
65 >        pass
66 >
67 >    def getErrorMessage(self):
68 >        """ Return exception error """
69 >        return "%s" % (self.args)
70  
71   # ####################################
 72   # class to find and extract info from published data
73   class DataDiscovery:
74 <    def __init__(self, owner, dataset, dataTiers, cfg_params):
74 >    def __init__(self, datasetPath, cfg_params, skipAnBlocks):
75  
76 < #       Attributes
77 <        self.dbsdataset='/'+dataset+'/datatier/'+owner
46 <        self.dataTiers = dataTiers
76 >        #       Attributes
77 >        self.datasetPath = datasetPath
78          self.cfg_params = cfg_params
79 +        self.skipBlocks = skipAnBlocks
80  
81 <        self.dbspaths= []     # DBS output: list of dbspaths for all data
82 <        self.allblocks = []   # DBS output: list of map fileblocks-totevts for all dataset-owners
83 <        self.blocksinfo = {}  # DBS output: map fileblocks-totevts for the primary block, used internally to this class
84 < #DBS output: max events computed by method getMaxEvents
81 >        self.eventsPerBlock = {}  # DBS output: map fileblocks-events for collection
82 >        self.eventsPerFile = {}   # DBS output: map files-events
83 >        self.blocksinfo = {}      # DBS output: map fileblocks-files
84 >        self.maxEvents = 0        # DBS output: max events
 85 >        self.parent = {}       # DBS output: map file -> list of parent files
86  
87   # ####################################
88      def fetchDBSInfo(self):
89          """
90          Contact DBS
91          """
92 <        parents = []
93 <        parentsblocksinfo = {}
94 <
95 <        ## add the PU among the required data tiers if the Digi are requested
96 <        if (self.dataTiers.count('Digi')>0) & (self.dataTiers.count('PU')<=0) :
97 <          self.dataTiers.append('PU')
98 <
99 <        ## get info about the requested dataset
100 <        dbs=DBSInfo(self.dbsdataset,self.dataTiers)
101 <        try:
102 <          self.blocksinfo=dbs.getDatasetContents()
103 <        except DBSError, ex:
104 <          raise DataDiscoveryError(ex.getErrorMessage())
105 <        
106 <        if len(self.blocksinfo)<=0:
107 <         msg="\nERROR Data %s do not exist in DBS! \n Check the dataset/owner variables in crab.cfg !"%self.dbsdataset
108 <         raise NotExistingDatasetError(msg)
109 <
110 <        currentdatatier=string.split(self.blocksinfo.keys()[0],'/')[2]
111 <        fakedatatier=string.split(self.dbsdataset,'/')[2]
112 <        currentdbsdataset=string.replace(self.dbsdataset, fakedatatier, currentdatatier)  
113 <
114 <        self.dbspaths.append(currentdbsdataset)    # add the requested dbspath
115 <
116 <        ## get info about the parents
92 >        ## get DBS URL
93 >        global_url="http://cmsdbsprod.cern.ch/cms_dbs_prod_global/servlet/DBSServlet"
94 >        caf_url = "http://cmsdbsprod.cern.ch/cms_dbs_caf_analysis_01/servlet/DBSServlet"
95 >        dbs_url_map  =   {'glite':    global_url,
96 >                          'glitecoll':global_url,\
97 >                          'condor':   global_url,\
98 >                          'condor_g': global_url,\
99 >                          'glidein':  global_url,\
100 >                          'lsf':      global_url,\
101 >                          'caf':      caf_url,\
102 >                          'sge':      global_url,
103 >                          'arc':      global_url
104 >                          }
105 >
106 >        dbs_url_default = dbs_url_map[(common.scheduler.name()).lower()]
107 >        dbs_url=  self.cfg_params.get('CMSSW.dbs_url', dbs_url_default)
108 >        common.logger.debug(3,"Accessing DBS at: "+dbs_url)
109 >
110 >        ## check if runs are selected
111 >        runselection = []
112 >        if (self.cfg_params.has_key('CMSSW.runselection')):
113 >            runselection = parseRange2(self.cfg_params['CMSSW.runselection'])
114 >
115 >
116 >        self.splitByRun = int(self.cfg_params.get('CMSSW.split_by_run', 0))
117 >          
118 >        self.ads = int(self.cfg_params.get('CMSSW.ads', 0))
119 >
120 >        common.logger.debug(6,"runselection is: %s"%runselection)
121 >        ## service API
122 >        args = {}
123 >        args['url']     = dbs_url
124 >        args['level']   = 'CRITICAL'
125 >
126 >        ## check if has been requested to use the parent info
127 >        useparent = int(self.cfg_params.get('CMSSW.use_parent',0))
128 >
129 >        ## check whether a non-default file to store/read analyzed fileBlocks has been requested
130 >        defaultName = common.work_space.shareDir()+'AnalyzedBlocks.txt'  
131 >        fileBlocks_FileName = os.path.abspath(self.cfg_params.get('CMSSW.fileblocks_file',defaultName))
132 >
133 >        api = DBSAPI.dbsApi.DbsApi(args)
134 >
135 >        self.files = self.queryDbs(api,path=self.datasetPath,runselection=runselection,useParent=useparent)
136 >
137 >        anFileBlocks = []
138 >        if self.skipBlocks: anFileBlocks = readTXTfile(self, fileBlocks_FileName)
139 >
140 >        # parse files and fill arrays
141 >        for file in self.files :
142 >            parList = []
143 >            # skip already analyzed blocks
144 >            fileblock = file['Block']['Name']
145 >            if fileblock not in anFileBlocks :
146 >                filename = file['LogicalFileName']
147 >                # asked retry the list of parent for the given child
148 >                if useparent==1: parList = [x['LogicalFileName'] for x in file['ParentList']]
149 >                self.parent[filename] = parList
150 >                if filename.find('.dat') < 0 :
151 >                    events    = file['NumberOfEvents']
152 >                    # number of events per block
153 >                    if fileblock in self.eventsPerBlock.keys() :
154 >                        self.eventsPerBlock[fileblock] += events
155 >                    else :
156 >                        self.eventsPerBlock[fileblock] = events
157 >                    # number of events per file
158 >                    self.eventsPerFile[filename] = events
159 >            
160 >                    # number of events per block
161 >                    if fileblock in self.blocksinfo.keys() :
162 >                        self.blocksinfo[fileblock].append(filename)
163 >                    else :
164 >                        self.blocksinfo[fileblock] = [filename]
165 >            
166 >                    # total number of events
167 >                    self.maxEvents += events
168 >        if  self.skipBlocks and len(self.eventsPerBlock.keys()) == 0:
169 >            msg = "No new fileblocks available for dataset: "+str(self.datasetPath)
170 >            raise  CrabException(msg)    
171 >
172 >        saveFblocks=''
173 >        for block in self.eventsPerBlock.keys() :
174 >            saveFblocks += str(block)+'\n'
175 >            common.logger.debug(6,"DBSInfo: total nevts %i in block %s "%(self.eventsPerBlock[block],block))
176 >        writeTXTfile(self, fileBlocks_FileName , saveFblocks)
177 >                      
178 >        if len(self.eventsPerBlock) <= 0:
179 >            raise NotExistingDatasetError(("\nNo data for %s in DBS\nPlease check"
180 >                                            + " dataset path variables in crab.cfg")
181 >                                            % self.datasetPath)
182 >
183 >
184 > ###########################
185 >
186 >    def queryDbs(self,api,path=None,runselection=None,useParent=None):
187 >
188 >        allowedRetriveValue = ['retrive_block', 'retrive_run']
189 >        if useParent==1 : allowedRetriveValue = allowedRetriveValue + ['retrive_parent']
190          try:
191 <          parents=dbs.getDatasetProvenance()
192 <        except DBSInvalidDataTierError, ex:
193 <          msg=ex.getErrorMessage()+' \n Check the data_tier variable in crab.cfg !\n'
194 <          raise DataDiscoveryError(msg)
195 <        except DBSError, ex:
196 <          raise DataDiscoveryError(ex.getErrorMessage())
197 <
198 <        ## check that the user asks for parent Data Tier really existing in the DBS provenance
199 <        self.checkParentDataTier(parents, self.dataTiers, currentdbsdataset)
200 <
201 <        ## for each parent get the corresponding fileblocks
202 <        for aparent in parents:
203 <           ## fill a list of dbspaths
204 <           parentdbsdataset=aparent.getDatasetPath()
205 <           self.dbspaths.append(parentdbsdataset)
206 <           pdbs=DBSInfo(parentdbsdataset,[])
207 <           try:
208 <             parentsblocksinfo=pdbs.getDatasetContents()
209 <           except DBSError, ex:
210 <            raise DataDiscoveryError(ex.getErrorMessage())
211 <
212 <           self.allblocks.append(parentsblocksinfo.keys()) # add parent fileblocksinfo
213 <
214 <        ## all the required blocks
215 <        self.allblocks.append(self.blocksinfo.keys()) # add also the current fileblocksinfo
191 >            if len(runselection) <=0 :
192 >                if useParent==1 or self.splitByRun==1 :
193 >                    if self.ads==1 :          
194 >                        files = api.listFiles(analysisDataset=path, retriveList=allowedRetriveValue)
195 >                    else :
196 >                        files = api.listFiles(path=path, retriveList=allowedRetriveValue)
197 >                    common.logger.debug(5,"Set of input parameters used for DBS query : \n"+str(allowedRetriveValue))
198 >                    common.logger.write("Set of input parameters used for DBS query : \n"+str(allowedRetriveValue))
199 >                else:
200 >                    files = api.listDatasetFiles(self.datasetPath)
201 >            else :
202 >                files=[]
203 >                for arun in runselection:
204 >                    try:
205 >                        if self.ads==1 : filesinrun = api.listFiles(analysisDataset=path,retriveList=allowedRetriveValue,runNumber=arun)
206 >                        else: filesinrun = api.listFiles(path=path,retriveList=allowedRetriveValue,runNumber=arun)
207 >                        files.extend(filesinrun)
208 >                    except:
209 >                        msg="WARNING: problem extracting info from DBS for run %s "%arun
210 >                        common.logger.message(msg)
211 >                        pass
212 >
213 >        except DbsBadRequest, msg:
214 >            raise DataDiscoveryError(msg)
215 >        except DBSError, msg:
216 >            raise DataDiscoveryError(msg)
217  
218 +        return files
219  
220   # #################################################
221 <    def checkParentDataTier(self, parents, user_datatiers, currentdbsdataset ):
221 >    def getMaxEvents(self):
222          """
223 <         check that the data tiers requested by the user really exists in the provenance of the given dataset
223 >        max events
224          """
225 <
118 <        current_datatier=string.split(currentdbsdataset,'/')[2]
119 <
120 <        parent_datatypes=[]
121 <        for aparent in parents:
122 <          parent_datatypes.append(aparent.getDataType())
123 <
124 <        for datatier in user_datatiers:
125 <          if parent_datatypes.count(datatier)<=0:
126 <             # the current datatier is not supposed to be in the provenance
127 <             if not (datatier == current_datatier):  
128 <              msg="\nERROR Data %s not published in DBS with asked data tiers : the data tier not found is %s !\n  Check the data_tier variable in crab.cfg !"%(currentdbsdataset,datatier)
129 <              raise  NoDataTierinProvenanceError(msg)
130 <
225 >        return self.maxEvents
226  
227   # #################################################
228 <    def getMaxEvents(self):
228 >    def getEventsPerBlock(self):
229          """
230 <         max events of the primary dataset-owner
230 >        list the event collections structure by fileblock
231          """
232 <        ## loop over the fileblocks of the primary dataset-owner
138 <        nevts=0      
139 <        for blockevts in self.blocksinfo.values():
140 <          nevts=nevts+blockevts
232 >        return self.eventsPerBlock
233  
234 <        return nevts
234 > # #################################################
235 >    def getEventsPerFile(self):
236 >        """
237 >        list the event collections structure by file
238 >        """
239 >        return self.eventsPerFile
240  
241   # #################################################
242 <    def getDBSPaths(self):
242 >    def getFiles(self):
243          """
244 <         list the DBSpaths for all required data
244 >        return files grouped by fileblock
245          """
246 <        return self.dbspaths
246 >        return self.blocksinfo        
247  
248   # #################################################
249 <    def getEVC(self):
249 >    def getParent(self):
250          """
251 <         list the event collections structure by fileblock
251 >        return parent grouped by file
252          """
253 <        print "To be used by a more complex job splitting... TODO later... "
157 <        print "it requires changes in what's returned by DBSInfo.getDatasetContents and then fetchDBSInfo"
253 >        return self.parent        
254  
255   # #################################################
256 <    def getFileBlocks(self):
256 >    def getListFiles(self):
257          """
258 <         fileblocks for all required dataset-owners
258 >        return the full list of files retrieved from DBS
259          """
260 <        return self.allblocks        
260 >        return self.files        
261  
262   ########################################################################
167
168

Diff Legend

Removed lines
+ Added lines
< Changed lines
> Changed lines