root/cvsroot/COMP/CRAB/python/DataDiscovery.py

Comparing COMP/CRAB/python/DataDiscovery.py (file contents):
Revision 1.5 by afanfani, Thu May 18 18:46:22 2006 UTC vs.
Revision 1.23 by spiga, Mon Jun 9 17:50:45 2008 UTC

# Line 1 | Line 1
1 < #!/usr/bin/env python2
2 < import sys, os, string, re
3 < from DBSInfo import *
4 <
1 > #!/usr/bin/env python
2 > import exceptions
3 > import DBSAPI.dbsApi
4 > from DBSAPI.dbsApiException import *
5 > import common
6 > from crab_util import *
7 > import os
8 >
9 >
10 > # #######################################
11 > class DBSError(exceptions.Exception):
12 >    def __init__(self, errorName, errorMessage):
13 >        args='\nERROR DBS %s : %s \n'%(errorName,errorMessage)
14 >        exceptions.Exception.__init__(self, args)
15 >        pass
16 >    
17 >    def getErrorMessage(self):
18 >        """ Return error message """
19 >        return "%s" % (self.args)
20 >
21 > # #######################################
22 > class DBSInvalidDataTierError(exceptions.Exception):
23 >    def __init__(self, errorName, errorMessage):
24 >        args='\nERROR DBS %s : %s \n'%(errorName,errorMessage)
25 >        exceptions.Exception.__init__(self, args)
26 >        pass
27 >    
28 >    def getErrorMessage(self):
29 >        """ Return error message """
30 >        return "%s" % (self.args)
31 >
32 > # #######################################
33 > class DBSInfoError:
34 >    def __init__(self, url):
35 >        print '\nERROR accessing DBS url : '+url+'\n'
36 >        pass
37  
38   # ####################################
39   class DataDiscoveryError(exceptions.Exception):
40 <  def __init__(self, errorMessage):
41 <   args=errorMessage
42 <   exceptions.Exception.__init__(self, args)
43 <   pass
44 <
45 <  def getErrorMessage(self):
46 <   """ Return exception error """
47 <   return "%s" % (self.args)
40 >    def __init__(self, errorMessage):
41 >        self.args=errorMessage
42 >        exceptions.Exception.__init__(self, self.args)
43 >        pass
44 >
45 >    def getErrorMessage(self):
46 >        """ Return exception error """
47 >        return "%s" % (self.args)
48  
49   # ####################################
50   class NotExistingDatasetError(exceptions.Exception):
51 <  def __init__(self, errorMessage):
52 <   args=errorMessage
53 <   exceptions.Exception.__init__(self, args)
54 <   pass
55 <
56 <  def getErrorMessage(self):
57 <   """ Return exception error """
58 <   return "%s" % (self.args)
51 >    def __init__(self, errorMessage):
52 >        self.args=errorMessage
53 >        exceptions.Exception.__init__(self, self.args)
54 >        pass
55 >
56 >    def getErrorMessage(self):
57 >        """ Return exception error """
58 >        return "%s" % (self.args)
59  
60   # ####################################
61   class NoDataTierinProvenanceError(exceptions.Exception):
62 <  def __init__(self, errorMessage):
63 <   args=errorMessage
64 <   exceptions.Exception.__init__(self, args)
65 <   pass
66 <
67 <  def getErrorMessage(self):
68 <   """ Return exception error """
69 <   return "%s" % (self.args)
62 >    def __init__(self, errorMessage):
63 >        self.args=errorMessage
64 >        exceptions.Exception.__init__(self, self.args)
65 >        pass
66 >
67 >    def getErrorMessage(self):
68 >        """ Return exception error """
69 >        return "%s" % (self.args)
70  
71   # ####################################
72   # class to find and extract info from published data
73   class DataDiscovery:
74 <    def __init__(self, owner, dataset, dataTiers, cfg_params):
74 >    def __init__(self, datasetPath, cfg_params, skipAnBlocks):
75  
76 < #       Attributes
77 <        self.owner = owner
46 <        self.dataset = dataset
47 <        self.dataTiers = dataTiers
76 >        #       Attributes
77 >        self.datasetPath = datasetPath
78          self.cfg_params = cfg_params
79 +        self.skipBlocks = skipAnBlocks
80  
81 <        self.dbspaths= []     # DBS output: list of dbspaths for all data
82 <        self.allblocks = []   # DBS output: list of map fileblocks-totevts for all dataset-owners
83 <        self.blocksinfo = {}  # DBS output: map fileblocks-totevts for the primary block, used internally to this class
84 < #DBS output: max events computed by method getMaxEvents
81 >        self.eventsPerBlock = {}  # DBS output: map fileblocks-events for collection
82 >        self.eventsPerFile = {}   # DBS output: map files-events
83 >        self.blocksinfo = {}      # DBS output: map fileblocks-files
84 >        self.maxEvents = 0        # DBS output: max events
85 >        self.parent = {}          # DBS output: map files-parents
86  
87   # ####################################
88      def fetchDBSInfo(self):
# Line 58 | Line 90 | class DataDiscovery:
90          Contact DBS
91          """
92  
93 <        ## add the PU among the required data tiers if the Digi are requested
94 <        if (self.dataTiers.count('Digi')>0) & (self.dataTiers.count('PU')<=0) :
95 <          self.dataTiers.append('PU')
96 <
97 <        ## get info about the requested dataset
98 <        dbs=DBSInfo()
93 >        ## get DBS URL
94 >        dbs_url="http://cmsdbsprod.cern.ch/cms_dbs_prod_global/servlet/DBSServlet"
95 >        if (self.cfg_params.has_key('CMSSW.dbs_url')):
96 >            dbs_url=self.cfg_params['CMSSW.dbs_url']
97 >
98 >        common.logger.debug(3,"Accessing DBS at: "+dbs_url)
99 >
100 >        ## check if runs are selected
101 >        runselection = []
102 >        if (self.cfg_params.has_key('CMSSW.runselection')):
103 >            runselection = parseRange2(self.cfg_params['CMSSW.runselection'])
104 >
105 >        common.logger.debug(6,"runselection is: %s"%runselection)
106 >        ## service API
107 >        args = {}
108 >        args['url']     = dbs_url
109 >        args['level']   = 'CRITICAL'
110 >
111 >        ## check whether use of the parent info has been requested
112 >        useParent = self.cfg_params.get('CMSSW.use_parent',False)
113 >
114 >        ## check whether a non-default file to store/read analyzed fileBlocks has been requested
115 >        defaultName = common.work_space.shareDir()+'AnalyzedBlocks.txt'  
116 >        fileBlocks_FileName = os.path.abspath(self.cfg_params.get('CMSSW.fileblocks_file',defaultName))
117 >
118 >        api = DBSAPI.dbsApi.DbsApi(args)
119          try:
120 <         self.datasets = dbs.getMatchingDatasets(self.owner, self.dataset)
121 <        except DBSError, ex:
122 <          raise DataDiscoveryError(ex.getErrorMessage())
123 <        if len(self.datasets) == 0:
124 <          raise DataDiscoveryError("Owner=%s, Dataset=%s unknown to DBS" % (self.owner, self.dataset))
125 <        if len(self.datasets) > 1:
126 <          raise DataDiscoveryError("Owner=%s, Dataset=%s is ambiguous" % (self.owner, self.dataset))
127 <        try:
128 <          self.dbsdataset = self.datasets[0].get('datasetPathName')
129 <          self.blocksinfo = dbs.getDatasetContents(self.dbsdataset)
130 <          self.allblocks.append (self.blocksinfo.keys ()) # add also the current fileblocksinfo
131 <          self.dbspaths.append(self.dbsdataset)
132 <        except DBSError, ex:
133 <          raise DataDiscoveryError(ex.getErrorMessage())
134 <        
135 <        if len(self.blocksinfo)<=0:
136 <         msg="\nERROR Data for %s do not exist in DBS! \n Check the dataset/owner variables in crab.cfg !"%self.dbsdataset
137 <         raise NotExistingDatasetError(msg)
138 <
139 <
140 <        ## get info about the parents
141 <        try:
142 <          parents=dbs.getDatasetProvenance(self.dbsdataset, self.dataTiers)
143 <        except DBSInvalidDataTierError, ex:
144 <          msg=ex.getErrorMessage()+' \n Check the data_tier variable in crab.cfg !\n'
145 <          raise DataDiscoveryError(msg)
146 <        except DBSError, ex:
147 <          raise DataDiscoveryError(ex.getErrorMessage())
148 <
149 <        ## check that the user asks for parent Data Tier really existing in the DBS provenance
150 <        self.checkParentDataTier(parents, self.dataTiers)
120 >            if len(runselection) <= 0 :
121 >                if useParent:
122 >                    allowedRetriveValue = ['retrive_parent',
123 >                                           'retrive_block',
124 >                                           'retrive_lumi',
125 >                                           'retrive_run'
126 >                                           ]
127 >                    files = api.listFiles(path=self.datasetPath, retriveList=allowedRetriveValue)
128 >                    common.logger.debug(5,"Set of input parameters used for DBS query : \n"+str(allowedRetriveValue))
129 >                    common.logger.write("Set of input parameters used for DBS query : \n"+str(allowedRetriveValue))
130 >                else:
131 >                    files = api.listDatasetFiles(self.datasetPath)
132 >            else :
133 >                files=[]
134 >                for arun in runselection:
135 >                    try:
136 >                        filesinrun = api.listFiles(path=self.datasetPath,retriveList=allowedRetriveValue,runNumber=arun)
137 >                        files.extend(filesinrun)
138 >                    except:
139 >                        msg="WARNING: problem extracting info from DBS for run %s "%arun
140 >                        common.logger.message(msg)
141 >                        pass
142 >
143 >        except DbsBadRequest, msg:
144 >            raise DataDiscoveryError(msg)
145 >        except DBSError, msg:
146 >            raise DataDiscoveryError(msg)
147 >
148 >        anFileBlocks = []
149 >        if self.skipBlocks: anFileBlocks = readTXTfile(self, fileBlocks_FileName)
150 >
151 >        # parse files and fill arrays
152 >        for file in files :
153 >            parList = []
154 >            # skip already analyzed blocks
155 >            fileblock = file['Block']['Name']
156 >            if fileblock not in anFileBlocks :
157 >                filename = file['LogicalFileName']
158 >                # if requested, retrieve the list of parents for the given child
159 >                if useParent: parList = [x['LogicalFileName'] for x in file['ParentList']]
160 >                self.parent[filename] = parList
161 >                if filename.find('.dat') < 0 :
162 >                    events    = file['NumberOfEvents']
163 >                    # number of events per block
164 >                    if fileblock in self.eventsPerBlock.keys() :
165 >                        self.eventsPerBlock[fileblock] += events
166 >                    else :
167 >                        self.eventsPerBlock[fileblock] = events
168 >                    # number of events per file
169 >                    self.eventsPerFile[filename] = events
170 >            
171 >                    # list of files per block
172 >                    if fileblock in self.blocksinfo.keys() :
173 >                        self.blocksinfo[fileblock].append(filename)
174 >                    else :
175 >                        self.blocksinfo[fileblock] = [filename]
176 >            
177 >                    # total number of events
178 >                    self.maxEvents += events
179 >        if  self.skipBlocks and len(self.eventsPerBlock.keys()) == 0:
180 >            msg = "No new fileblocks available for dataset: "+str(self.datasetPath)
181 >            raise  CrabException(msg)    
182 >
183 >        saveFblocks=''
184 >        for block in self.eventsPerBlock.keys() :
185 >            saveFblocks += str(block)+'\n'
186 >            common.logger.debug(6,"DBSInfo: total nevts %i in block %s "%(self.eventsPerBlock[block],block))
187 >        writeTXTfile(self, fileBlocks_FileName , saveFblocks)
188 >                      
189 >        if len(self.eventsPerBlock) <= 0:
190 >            raise NotExistingDatasetError(("\nNo data for %s in DBS\nPlease check"
191 >                                            + " dataset path variables in crab.cfg")
192 >                                            % self.datasetPath)
193  
100        ## for each parent get the corresponding fileblocks
101        try:
102          for p in parents:
103            ## fill a list of dbspaths
104            parentPath = p.get('parent').get('datasetPathName')
105            self.dbspaths.append (parentPath)
106            parentBlocks = dbs.getDatasetContents (parentPath)
107            self.allblocks.append (parentBlocks.keys ())  # add parent fileblocksinfo
108        except DBSError, ex:
109            raise DataDiscoveryError(ex.getErrorMessage())
194  
195   # #################################################
196 <    def checkParentDataTier(self, parents, dataTiers):
196 >    def getMaxEvents(self):
197          """
198 <         check that the data tiers requested by the user really exists in the provenance of the given dataset
198 >        max events
199          """
200 <        startType = string.split(self.dbsdataset,'/')[2]
117 <        # for example 'type' is PU and 'dataTier' is Hit
118 <        parentTypes = map(lambda p: p.get('type'), parents)
119 <        for tier in dataTiers:
120 <          if parentTypes.count(tier) <= 0 and tier != startType:
121 <            msg="\nERROR Data %s not published in DBS with asked data tiers : the data tier not found is %s !\n  Check the data_tier variable in crab.cfg !"%(self.dbsdataset,tier)
122 <            raise  NoDataTierinProvenanceError(msg)
123 <
200 >        return self.maxEvents
201  
202   # #################################################
203 <    def getMaxEvents(self):
203 >    def getEventsPerBlock(self):
204          """
205 <         max events of the primary dataset-owner
205 >        list the event collections structure by fileblock
206          """
207 <        ## loop over the fileblocks of the primary dataset-owner
131 <        nevts=0      
132 <        for blockevts in self.blocksinfo.values():
133 <          nevts=nevts+blockevts
134 <
135 <        return nevts
207 >        return self.eventsPerBlock
208  
209   # #################################################
210 <    def getDBSPaths(self):
210 >    def getEventsPerFile(self):
211          """
212 <         list the DBSpaths for all required data
212 >        list the event collections structure by file
213          """
214 <        return self.dbspaths
214 >        return self.eventsPerFile
215  
216   # #################################################
217 <    def getEVC(self):
217 >    def getFiles(self):
218          """
219 <         list the event collections structure by fileblock
219 >        return files grouped by fileblock
220          """
221 <        print "To be used by a more complex job splitting... TODO later... "
150 <        print "it requires changes in what's returned by DBSInfo.getDatasetContents and then fetchDBSInfo"
221 >        return self.blocksinfo        
222  
223   # #################################################
224 <    def getFileBlocks(self):
224 >    def getParent(self):
225          """
226 <         fileblocks for all required dataset-owners
226 >        return parent grouped by file
227          """
228 <        return self.allblocks        
228 >        return self.parent        
229  
230   ########################################################################
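Note on the bookkeeping in fetchDBSInfo (revision 1.23): the per-file loop fills eventsPerBlock, eventsPerFile and blocksinfo, and accumulates maxEvents. The self-contained sketch below reproduces that logic on a hand-made file list whose entries mimic the structure returned by the DBS API in the diff above (Block/Name, LogicalFileName, NumberOfEvents); the dataset name, LFNs and event counts are invented for illustration only.

    # Hypothetical sample mimicking what DBSAPI listFiles returns; all values invented.
    sample_files = [
        {'Block': {'Name': '/PrimaryDS/ProcDS/RECO#block-1'},
         'LogicalFileName': '/store/data/file1.root', 'NumberOfEvents': 1000},
        {'Block': {'Name': '/PrimaryDS/ProcDS/RECO#block-1'},
         'LogicalFileName': '/store/data/file2.root', 'NumberOfEvents': 500},
        {'Block': {'Name': '/PrimaryDS/ProcDS/RECO#block-2'},
         'LogicalFileName': '/store/data/file3.root', 'NumberOfEvents': 2000},
    ]

    eventsPerBlock = {}   # fileblock -> total events, as in self.eventsPerBlock
    eventsPerFile  = {}   # LFN -> events, as in self.eventsPerFile
    blocksinfo     = {}   # fileblock -> list of LFNs, as in self.blocksinfo
    maxEvents      = 0

    for f in sample_files:
        fileblock = f['Block']['Name']
        filename  = f['LogicalFileName']
        if filename.find('.dat') >= 0:    # skip .dat files, as in the revision above
            continue
        events = f['NumberOfEvents']
        eventsPerBlock[fileblock] = eventsPerBlock.get(fileblock, 0) + events
        eventsPerFile[filename] = events
        blocksinfo.setdefault(fileblock, []).append(filename)
        maxEvents += events

    print(eventsPerBlock)   # {'...#block-1': 1500, '...#block-2': 2000}
    print(maxEvents)        # 3500

The dict.get / setdefault form is just a compact equivalent of the explicit "if fileblock in ... keys()" branches used in the revision above.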
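Note on run selection: the new revision reads CMSSW.runselection from the configuration and expands it with parseRange2() from crab_util before querying DBS one run at a time. parseRange2 is not part of this file; the sketch below is only an assumption about the comma/dash syntax such a parameter is commonly given (e.g. '1,3,10-12') and is not the actual CRAB helper.

    def expand_run_selection(selection):
        # Hypothetical stand-in for crab_util.parseRange2: expand "1,3,10-12"
        # into [1, 3, 10, 11, 12]. The real helper may behave differently.
        runs = []
        for item in selection.split(','):
            item = item.strip()
            if '-' in item:
                first, last = item.split('-')
                runs.extend(range(int(first), int(last) + 1))
            else:
                runs.append(int(item))
        return runs

    print(expand_run_selection('1,3,10-12'))   # [1, 3, 10, 11, 12]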

Diff Legend

Removed lines
+ Added lines
< Changed lines (revision 1.5)
> Changed lines (revision 1.23)