Comparing COMP/CRAB/python/DataDiscovery.py (file contents):
Revision 1.8 by spiga, Thu Jul 20 11:59:09 2006 UTC vs.
Revision 1.28 by spiga, Fri Feb 6 18:14:58 2009 UTC

# Line 1 | Line 1
1   #!/usr/bin/env python
2 < import sys, os, string, re
3 < from DBSInfo import *
2 > import exceptions
3 > import DBSAPI.dbsApi
4 > from DBSAPI.dbsApiException import *
5 > import common
6 > from crab_util import *
7 > import os
8  
9  
10 + # #######################################
11 + class DBSError(exceptions.Exception):
12 +    def __init__(self, errorName, errorMessage):
13 +        args='\nERROR DBS %s : %s \n'%(errorName,errorMessage)
14 +        exceptions.Exception.__init__(self, args)
15 +        pass
16 +    
17 +    def getErrorMessage(self):
18 +        """ Return error message """
19 +        return "%s" % (self.args)
20 +
21 + # #######################################
22 + class DBSInvalidDataTierError(exceptions.Exception):
23 +    def __init__(self, errorName, errorMessage):
24 +        args='\nERROR DBS %s : %s \n'%(errorName,errorMessage)
25 +        exceptions.Exception.__init__(self, args)
26 +        pass
27 +    
28 +    def getErrorMessage(self):
29 +        """ Return error message """
30 +        return "%s" % (self.args)
31 +
32 + # #######################################
33 + class DBSInfoError:
34 +    def __init__(self, url):
35 +        print '\nERROR accessing DBS url : '+url+'\n'
36 +        pass
37 +
38   # ####################################
39   class DataDiscoveryError(exceptions.Exception):
40      def __init__(self, errorMessage):
41 <        args=errorMessage
42 <        exceptions.Exception.__init__(self, args)
41 >        self.args=errorMessage
42 >        exceptions.Exception.__init__(self, self.args)
43          pass
44  
45      def getErrorMessage(self):
# Line 17 | Line 49 | class DataDiscoveryError(exceptions.Exce
49   # ####################################
50   class NotExistingDatasetError(exceptions.Exception):
51      def __init__(self, errorMessage):
52 <        args=errorMessage
53 <        exceptions.Exception.__init__(self, args)
52 >        self.args=errorMessage
53 >        exceptions.Exception.__init__(self, self.args)
54          pass
55  
56      def getErrorMessage(self):
# Line 28 | Line 60 | class NotExistingDatasetError(exceptions
60   # ####################################
61   class NoDataTierinProvenanceError(exceptions.Exception):
62      def __init__(self, errorMessage):
63 <        args=errorMessage
64 <        exceptions.Exception.__init__(self, args)
63 >        self.args=errorMessage
64 >        exceptions.Exception.__init__(self, self.args)
65          pass
66  
67      def getErrorMessage(self):
# Line 39 | Line 71 | class NoDataTierinProvenanceError(except
71   # ####################################
72   # class to find and extract info from published data
73   class DataDiscovery:
74 <    def __init__(self, owner, dataset, dataTiers, cfg_params):
74 >    def __init__(self, datasetPath, cfg_params, skipAnBlocks):
75  
76 < #       Attributes
77 <        self.owner = owner
46 <        self.dataset = dataset
47 <        self.dataTiers = dataTiers
76 >        #       Attributes
77 >        self.datasetPath = datasetPath
78          self.cfg_params = cfg_params
79 +        self.skipBlocks = skipAnBlocks
80  
81 <        self.dbspaths= []     # DBS output: list of dbspaths for all data
82 <        self.allblocks = []   # DBS output: list of map fileblocks-totevts for all dataset-owners
83 <        self.blocksinfo = {}  # DBS output: map fileblocks-totevts for the primary block, used internally to this class
84 < #DBS output: max events computed by method getMaxEvents
81 >        self.eventsPerBlock = {}  # DBS output: map fileblocks-events for collection
82 >        self.eventsPerFile = {}   # DBS output: map files-events
83 >        self.blocksinfo = {}      # DBS output: map fileblocks-files
84 >        self.maxEvents = 0        # DBS output: max events
85 >        self.parent = {}       # DBS output: map file-parents
86  
87   # ####################################
88      def fetchDBSInfo(self):
89          """
90          Contact DBS
91          """
92 <
93 <        ## add the PU among the required data tiers if the Digi are requested
94 <        if (self.dataTiers.count('Digi')>0) & (self.dataTiers.count('PU')<=0) :
95 <            self.dataTiers.append('PU')
96 <
97 <        ## get info about the requested dataset
98 <        dbs=DBSInfo()
92 >        ## get DBS URL
93 >        global_url="http://cmsdbsprod.cern.ch/cms_dbs_prod_global/servlet/DBSServlet"
94 >        caf_url = "http://cmsdbsprod.cern.ch/cms_dbs_caf_analysis_01/servlet/DBSServlet"
95 >        dbs_url_map  =   {'glite':    global_url,
96 >                          'glitecoll':global_url,\
97 >                          'condor':   global_url,\
98 >                          'condor_g': global_url,\
99 >                          'glidein':  global_url,\
100 >                          'lsf':      global_url,\
101 >                          'caf':      caf_url,\
102 >                          'sge':      global_url
103 >                          }
104 >
105 >        dbs_url_default = dbs_url_map[(common.scheduler.name()).lower()]
106 >        dbs_url=  self.cfg_params.get('CMSSW.dbs_url', dbs_url_default)
107 >        common.logger.debug(3,"Accessing DBS at: "+dbs_url)
108 >
109 >        ## check if runs are selected
110 >        runselection = []
111 >        if (self.cfg_params.has_key('CMSSW.runselection')):
112 >            runselection = parseRange2(self.cfg_params['CMSSW.runselection'])
113 >
114 >
115 >        self.splitByRun = int(self.cfg_params.get('CMSSW.split_by_run', 0))
116 >          
117 >        self.ads = int(self.cfg_params.get('CMSSW.ads', 0))
118 >
119 >        common.logger.debug(6,"runselection is: %s"%runselection)
120 >        ## service API
121 >        args = {}
122 >        args['url']     = dbs_url
123 >        args['level']   = 'CRITICAL'
124 >
125 >        ## check whether use of the parent info has been requested
126 >        useparent = int(self.cfg_params.get('CMSSW.use_parent',0))
127 >
128 >        ## check whether a non-default file to store/read analyzed fileBlocks has been requested
129 >        defaultName = common.work_space.shareDir()+'AnalyzedBlocks.txt'  
130 >        fileBlocks_FileName = os.path.abspath(self.cfg_params.get('CMSSW.fileblocks_file',defaultName))
131 >
132 >        api = DBSAPI.dbsApi.DbsApi(args)
133 >
134 >        self.files = self.queryDbs(api,path=self.datasetPath,runselection=runselection,useParent=useparent)
135 >
136 >        anFileBlocks = []
137 >        if self.skipBlocks: anFileBlocks = readTXTfile(self, fileBlocks_FileName)
138 >
139 >        # parse files and fill arrays
140 >        for file in self.files :
141 >            parList = []
142 >            # skip already analyzed blocks
143 >            fileblock = file['Block']['Name']
144 >            if fileblock not in anFileBlocks :
145 >                filename = file['LogicalFileName']
146 >                # if requested, retrieve the list of parents for the given child
147 >                if useparent==1: parList = [x['LogicalFileName'] for x in file['ParentList']]
148 >                self.parent[filename] = parList
149 >                if filename.find('.dat') < 0 :
150 >                    events    = file['NumberOfEvents']
151 >                    # number of events per block
152 >                    if fileblock in self.eventsPerBlock.keys() :
153 >                        self.eventsPerBlock[fileblock] += events
154 >                    else :
155 >                        self.eventsPerBlock[fileblock] = events
156 >                    # number of events per file
157 >                    self.eventsPerFile[filename] = events
158 >            
159 >                    # list of files per block
160 >                    if fileblock in self.blocksinfo.keys() :
161 >                        self.blocksinfo[fileblock].append(filename)
162 >                    else :
163 >                        self.blocksinfo[fileblock] = [filename]
164 >            
165 >                    # total number of events
166 >                    self.maxEvents += events
167 >        if  self.skipBlocks and len(self.eventsPerBlock.keys()) == 0:
168 >            msg = "No new fileblocks available for dataset: "+str(self.datasetPath)
169 >            raise  CrabException(msg)    
170 >
171 >        saveFblocks=''
172 >        for block in self.eventsPerBlock.keys() :
173 >            saveFblocks += str(block)+'\n'
174 >            common.logger.debug(6,"DBSInfo: total nevts %i in block %s "%(self.eventsPerBlock[block],block))
175 >        writeTXTfile(self, fileBlocks_FileName , saveFblocks)
176 >                      
177 >        if len(self.eventsPerBlock) <= 0:
178 >            raise NotExistingDatasetError(("\nNo data for %s in DBS\nPlease check"
179 >                                            + " dataset path variables in crab.cfg")
180 >                                            % self.datasetPath)
181 >
182 >
183 > ###########################
184 >
185 >    def queryDbs(self,api,path=None,runselection=None,useParent=None):
186 >
187 >        allowedRetriveValue = [#'retrive_parent',
188 >                               'retrive_block',
189 >                               #'retrive_lumi',
190 >                               'retrive_run'
191 >                               ]
192          try:
193 <            self.datasets = dbs.getMatchingDatasets(self.owner, self.dataset)
194 <        except DBSError, ex:
195 <            raise DataDiscoveryError(ex.getErrorMessage())
196 <        if len(self.datasets) == 0:
197 <            raise DataDiscoveryError("Owner=%s, Dataset=%s unknown to DBS" % (self.owner, self.dataset))
198 <        if len(self.datasets) > 1:
199 <            raise DataDiscoveryError("Owner=%s, Dataset=%s is ambiguous" % (self.owner, self.dataset))
200 <        try:
201 <            self.dbsdataset = self.datasets[0].get('datasetPathName')
202 <            self.blocksinfo = dbs.getDatasetContents(self.dbsdataset)
203 <            self.allblocks.append (self.blocksinfo.keys ()) # add also the current fileblocksinfo
204 <            self.dbspaths.append(self.dbsdataset)
205 <        except DBSError, ex:
206 <            raise DataDiscoveryError(ex.getErrorMessage())
207 <        
208 <        if len(self.blocksinfo)<=0:
209 <            msg="\nERROR Data for %s do not exist in DBS! \n Check the dataset/owner variables in crab.cfg !"%self.dbsdataset
210 <            raise NotExistingDatasetError(msg)
211 <
193 >            if len(runselection) <=0 :
194 >                if useParent==1 or self.splitByRun==1 :
195 >                    if self.ads==1 :          
196 >                        files = api.listFiles(analysisDataset=path, retriveList=allowedRetriveValue)
197 >                    else :
198 >                        files = api.listFiles(path=path, retriveList=allowedRetriveValue)
199 >                    common.logger.debug(5,"Set of input parameters used for DBS query : \n"+str(allowedRetriveValue))
200 >                    common.logger.write("Set of input parameters used for DBS query : \n"+str(allowedRetriveValue))
201 >                else:
202 >                    files = api.listDatasetFiles(self.datasetPath)
203 >            else :
204 >                files=[]
205 >                for arun in runselection:
206 >                    try:
207 >                        if self.ads==1 : filesinrun = api.listFiles(analysisDataset=path,retriveList=allowedRetriveValue,runNumber=arun)
208 >                        else: filesinrun = api.listFiles(path=path,retriveList=allowedRetriveValue,runNumber=arun)
209 >                        files.extend(filesinrun)
210 >                    except:
211 >                        msg="WARNING: problem extracting info from DBS for run %s "%arun
212 >                        common.logger.message(msg)
213 >                        pass
214  
215 <        ## get info about the parents
216 <        try:
217 <            parents=dbs.getDatasetProvenance(self.dbsdataset, self.dataTiers)
91 <        except DBSInvalidDataTierError, ex:
92 <            msg=ex.getErrorMessage()+' \n Check the data_tier variable in crab.cfg !\n'
215 >        except DbsBadRequest, msg:
216 >            raise DataDiscoveryError(msg)
217 >        except DBSError, msg:
218              raise DataDiscoveryError(msg)
94        except DBSError, ex:
95            raise DataDiscoveryError(ex.getErrorMessage())
96
97        ## check that the user asks for parent Data Tier really existing in the DBS provenance
98        self.checkParentDataTier(parents, self.dataTiers)
99
100        ## for each parent get the corresponding fileblocks
101        try:
102            for p in parents:
103                ## fill a list of dbspaths
104                parentPath = p.get('parent').get('datasetPathName')
105                self.dbspaths.append (parentPath)
106                parentBlocks = dbs.getDatasetContents (parentPath)
107                self.allblocks.append (parentBlocks.keys ())  # add parent fileblocksinfo
108        except DBSError, ex:
109                raise DataDiscoveryError(ex.getErrorMessage())
110
111 # #################################################
112    def checkParentDataTier(self, parents, dataTiers):
113        """
114        check that the data tiers requested by the user really exists in the provenance of the given dataset
115        """
116        startType = string.split(self.dbsdataset,'/')[2]
117        # for example 'type' is PU and 'dataTier' is Hit
118        parentTypes = map(lambda p: p.get('type'), parents)
119        for tier in dataTiers:
120            if parentTypes.count(tier) <= 0 and tier != startType:
121                msg="\nERROR Data %s not published in DBS with asked data tiers : the data tier not found is %s !\n  Check the data_tier variable in crab.cfg !"%(self.dbsdataset,tier)
122                raise  NoDataTierinProvenanceError(msg)
219  
220 +        return files
221  
222   # #################################################
223      def getMaxEvents(self):
224          """
225 <        max events of the primary dataset-owner
225 >        total number of events in the dataset
226 >        """
227 >        return self.maxEvents
228 >
229 > # #################################################
230 >    def getEventsPerBlock(self):
231 >        """
232 >        return the number of events for each fileblock
233          """
234 <        ## loop over the fileblocks of the primary dataset-owner
131 <        nevts=0      
132 <        for blockevts in self.blocksinfo.values():
133 <            nevts=nevts+blockevts
234 >        return self.eventsPerBlock
235  
236 <        return nevts
236 > # #################################################
237 >    def getEventsPerFile(self):
238 >        """
239 >        return the number of events for each file
240 >        """
241 >        return self.eventsPerFile
242  
243   # #################################################
244 <    def getDBSPaths(self):
244 >    def getFiles(self):
245          """
246 <        list the DBSpaths for all required data
246 >        return files grouped by fileblock
247          """
248 <        return self.dbspaths
248 >        return self.blocksinfo        
249  
250   # #################################################
251 <    def getEVC(self):
251 >    def getParent(self):
252          """
253 <        list the event collections structure by fileblock
253 >        return parent grouped by file
254          """
255 <        print "To be used by a more complex job splitting... TODO later... "
150 <        print "it requires changes in what's returned by DBSInfo.getDatasetContents and then fetchDBSInfo"
255 >        return self.parent        
256  
257   # #################################################
258 <    def getFileBlocks(self):
258 >    def getListFiles(self):
259          """
260 <        fileblocks for all required dataset-owners
260 >        return the full list of files as returned by DBS
261          """
262 <        return self.allblocks        
262 >        return self.files        
263  
264   ########################################################################

Diff Legend

  Removed lines (no marker)
+ Added lines
< Changed lines (revision 1.8)
> Changed lines (revision 1.28)
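
For orientation, here is a minimal usage sketch of the DataDiscovery interface as it stands in revision 1.28. It is not part of the file itself: it assumes a fully initialized CRAB session (common.scheduler, common.logger and common.work_space set up by the CRAB client), and the dataset path and configuration values are placeholders.

# Minimal usage sketch (assumes an initialized CRAB session; the dataset path
# and configuration values below are placeholders, not taken from the code above).
cfg_params = {
    'CMSSW.dbs_url': 'http://cmsdbsprod.cern.ch/cms_dbs_prod_global/servlet/DBSServlet',
    'CMSSW.use_parent': 0,       # set to 1 to also retrieve the parent files
    'CMSSW.split_by_run': 0,
}

discovery = DataDiscovery('/Primary/Processed/RECO', cfg_params, 0)   # last argument: skipAnBlocks
discovery.fetchDBSInfo()                  # contacts DBS and fills the internal maps

print "total events     :", discovery.getMaxEvents()
print "events per block :", discovery.getEventsPerBlock()
print "files per block  :", discovery.getFiles()

fetchDBSInfo() also records the processed fileblocks in AnalyzedBlocks.txt under the CRAB share directory (or in the file given by CMSSW.fileblocks_file); passing a non-zero skipAnBlocks on a later run makes the discovery skip the blocks already listed there.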