
Comparing COMP/CRAB/python/cms_cmssw.py (file contents):
Revision 1.1 by slacapra, Thu Apr 6 16:18:17 2006 UTC vs.
Revision 1.221 by fanzago, Wed Jun 18 14:02:42 2008 UTC

# Line 2 | Line 2 | from JobType import JobType
2   from crab_logger import Logger
3   from crab_exceptions import *
4   from crab_util import *
5 + from BlackWhiteListParser import BlackWhiteListParser
6   import common
6
7 import DataDiscovery
8 import DataLocation
7   import Scram
8 + from LFNBaseName import *
9  
10 < import os, string, re
10 > import os, string, glob
11  
12   class Cmssw(JobType):
13 <    def __init__(self, cfg_params):
13 >    def __init__(self, cfg_params, ncjobs,skip_blocks, isNew):
14          JobType.__init__(self, 'CMSSW')
15          common.logger.debug(3,'CMSSW::__init__')
16 +        self.skip_blocks = skip_blocks
17 +
18 +        self.argsList = []
19 +
20 +        self._params = {}
21 +        self.cfg_params = cfg_params
22 +        # init BlackWhiteListParser
23 +        self.blackWhiteListParser = BlackWhiteListParser(cfg_params)
24 +
25 +        self.MaxTarBallSize = float(self.cfg_params.get('EDG.maxtarballsize',9.5))
26  
27 <        self.analisys_common_info = {}
 27 >        # number of jobs requested to be created; used as a limit on job splitting
28 >        self.ncjobs = ncjobs
29  
30          log = common.logger
31 <        
31 >
32          self.scram = Scram.Scram(cfg_params)
23        scramArea = ''
33          self.additional_inbox_files = []
34          self.scriptExe = ''
35          self.executable = ''
36 +        self.executable_arch = self.scram.getArch()
37          self.tgz_name = 'default.tgz'
38 +        self.scriptName = 'CMSSW.sh'
39 +        self.pset = ''
40 +        self.datasetPath = ''
41 +
42 +        # set FJR file name
43 +        self.fjrFileName = 'crab_fjr.xml'
44  
45          self.version = self.scram.getSWVersion()
46 <        common.analisys_common_info['sw_version'] = self.version
46 >        version_array = self.version.split('_')
47 >        self.CMSSW_major = 0
48 >        self.CMSSW_minor = 0
49 >        self.CMSSW_patch = 0
50 >        try:
51 >            self.CMSSW_major = int(version_array[1])
52 >            self.CMSSW_minor = int(version_array[2])
53 >            self.CMSSW_patch = int(version_array[3])
54 >        except:
55 >            msg = "Cannot parse CMSSW version string: " + self.version + " for major and minor release number!"
56 >            raise CrabException(msg)
57  
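
For reference, the version handling added above just splits the SCRAM release label on underscores and keeps the first three numeric fields; a minimal standalone sketch (assuming a label of the usual CMSSW_X_Y_Z form):

    def parse_cmssw_version(version):
        # e.g. "CMSSW_2_1_8" -> (2, 1, 8); raise on malformed labels
        parts = version.split('_')
        try:
            return int(parts[1]), int(parts[2]), int(parts[3])
        except (IndexError, ValueError):
            raise ValueError("Cannot parse CMSSW version string: " + version)

    print(parse_cmssw_version("CMSSW_2_1_8"))   # (2, 1, 8)
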
58          ### collect Data cards
59 <        try:
60 <            self.owner = cfg_params['CMSSW.owner']
61 <            log.debug(6, "CMSSW::CMSSW(): owner = "+self.owner)
36 <            self.dataset = cfg_params['CMSSW.dataset']
37 <            log.debug(6, "CMSSW::CMSSW(): dataset = "+self.dataset)
38 <        except KeyError:
39 <            msg = "Error: owner and/or dataset not defined "
59 >
60 >        if not cfg_params.has_key('CMSSW.datasetpath'):
61 >            msg = "Error: datasetpath not defined "
62              raise CrabException(msg)
63 +        
64 +        ### Temporary: added to remove input file control in the case of PU
65 +        if not cfg_params.has_key('USER.dataset_pu'):
66 +            self.dataset_pu = 'NONE'
67 +        else:
68 +            self.dataset_pu = cfg_params['USER.dataset_pu']
69 +        ####    
70 +        
71 +        tmp =  cfg_params['CMSSW.datasetpath']
72 +        log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
73 +        if string.lower(tmp)=='none':
74 +            self.datasetPath = None
75 +            self.selectNoInput = 1
76 +        else:
77 +            self.datasetPath = tmp
78 +            self.selectNoInput = 0
79  
80          self.dataTiers = []
81 <        try:
82 <            tmpDataTiers = string.split(cfg_params['CMSSW.data_tier'],',')
83 <            for tmp in tmpDataTiers:
46 <                tmp=string.strip(tmp)
47 <                self.dataTiers.append(tmp)
48 <                pass
49 <            pass
50 <        except KeyError:
51 <            pass
52 <        log.debug(6, "Cmssw::Cmssw(): dataTiers = "+str(self.dataTiers))
53 <
81 >        self.debugWrap = ''
82 >        self.debug_wrapper = cfg_params.get('USER.debug_wrapper',False)
83 >        if self.debug_wrapper: self.debugWrap='--debug'
84          ## now the application
85 <        try:
86 <            self.executable = cfg_params['CMSSW.executable']
57 <            log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
58 <            msg = "Default executable cmsRun overridden. Switch to " + self.executable
59 <            log.debug(3,msg)
60 <        except KeyError:
61 <            self.executable = 'cmsRun'
62 <            msg = "User executable not defined. Use cmsRun"
63 <            log.debug(3,msg)
64 <            pass
85 >        self.executable = cfg_params.get('CMSSW.executable','cmsRun')
86 >        log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
87  
88 <        try:
89 <            self.pset = cfg_params['CMSSW.pset']
90 <            log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
88 >        if not cfg_params.has_key('CMSSW.pset'):
89 >            raise CrabException("PSet file missing. Cannot run cmsRun ")
90 >        self.pset = cfg_params['CMSSW.pset']
91 >        log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
92 >        if self.pset.lower() != 'none' :
93              if (not os.path.exists(self.pset)):
94                  raise CrabException("User defined PSet file "+self.pset+" does not exist")
95 <        except KeyError:
96 <            raise CrabException("PSet file missing. Cannot run cmsRun ")
95 >        else:
96 >            self.pset = None
97  
98          # output files
99 <        try:
100 <            self.output_file = []
99 >        ## stuff which must be returned always via sandbox
100 >        self.output_file_sandbox = []
101  
102 <            tmp = cfg_params['CMSSW.output_file']
103 <            if tmp != '':
80 <                tmpOutFiles = string.split(cfg_params['CMSSW.output_file'],',')
81 <                log.debug(7, 'cmssw::cmssw(): output files '+str(tmpOutFiles))
82 <                for tmp in tmpOutFiles:
83 <                    tmp=string.strip(tmp)
84 <                    self.output_file.append(tmp)
85 <                    pass
102 >        # add fjr report by default via sandbox
103 >        self.output_file_sandbox.append(self.fjrFileName)
104  
105 <            else:
106 <                log.message("No output file defined: only stdout/err will be available")
107 <                pass
108 <            pass
109 <        except KeyError:
110 <            log.message("No output file defined: only stdout/err will be available")
111 <            pass
105 >        # other output files to be returned via sandbox or copied to SE
106 >        outfileflag = False
107 >        self.output_file = []
108 >        tmp = cfg_params.get('CMSSW.output_file',None)
109 >        if tmp :
110 >            self.output_file = [x.strip() for x in tmp.split(',')]
111 >            outfileflag = True #output found
112 >        #else:
113 >        #    log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
114  
115          # script_exe file as additional file in inputSandbox
116 <        try:
117 <           self.scriptExe = cfg_params['CMSSW.script_exe']
118 <           self.additional_inbox_files.append(self.scriptExe)
119 <        except KeyError:
120 <           pass
121 <        if self.scriptExe != '':
122 <           if os.path.isfile(self.scriptExe):
123 <              pass
124 <           else:
125 <              log.message("WARNING. file "+self.scriptExe+" not found")
126 <              sys.exit()
127 <                  
116 >        self.scriptExe = cfg_params.get('USER.script_exe',None)
117 >        if self.scriptExe :
118 >            if not os.path.isfile(self.scriptExe):
119 >                msg ="ERROR. file "+self.scriptExe+" not found"
120 >                raise CrabException(msg)
121 >            self.additional_inbox_files.append(string.strip(self.scriptExe))
122 >
123 >        if self.datasetPath == None and self.pset == None and self.scriptExe == '' :
124 >            msg ="Error. script_exe  not defined"
125 >            raise CrabException(msg)
126 >
127 >        # use parent files...
128 >        self.useParent = self.cfg_params.get('CMSSW.use_parent',False)
129 >
130          ## additional input files
131 <        try:
132 <            tmpAddFiles = string.split(cfg_params['CMSSW.additional_input_files'],',')
131 >        if cfg_params.has_key('USER.additional_input_files'):
132 >            tmpAddFiles = string.split(cfg_params['USER.additional_input_files'],',')
133              for tmp in tmpAddFiles:
134 <                tmp=string.strip(tmp)
135 <                self.additional_inbox_files.append(tmp)
134 >                tmp = string.strip(tmp)
135 >                dirname = ''
136 >                if not tmp[0]=="/": dirname = "."
137 >                files = []
138 >                if string.find(tmp,"*")>-1:
139 >                    files = glob.glob(os.path.join(dirname, tmp))
140 >                    if len(files)==0:
141 >                        raise CrabException("No additional input file found with this pattern: "+tmp)
142 >                else:
143 >                    files.append(tmp)
144 >                for file in files:
145 >                    if not os.path.exists(file):
146 >                        raise CrabException("Additional input file not found: "+file)
147 >                    pass
148 >                    self.additional_inbox_files.append(string.strip(file))
149                  pass
150              pass
151 <        except KeyError:
152 <            pass
151 >            common.logger.debug(5,"Additional input files: "+str(self.additional_inbox_files))
152 >        pass
153  
154 <        try:
155 <            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
156 <        except KeyError:
157 <            msg = 'Must define total_number_of_events and job_number_of_events'
158 <            raise CrabException(msg)
159 <            
160 < #Marco: FirstEvent is nolonger used inside PSet
126 < #        try:
127 < #            self.first = int(cfg_params['CMSSW.first_event'])
128 < #        except KeyError:
129 < #            self.first = 0
130 < #            pass
131 < #        log.debug(6, "Orca::Orca(): total number of events = "+`self.total_number_of_events`)
132 <        #log.debug(6, "Orca::Orca(): events per job = "+`self.job_number_of_events`)
133 < #        log.debug(6, "Orca::Orca(): first event = "+`self.first`)
134 <        
135 <        CEBlackList = []
136 <        try:
137 <            tmpBad = string.split(cfg_params['EDG.ce_black_list'],',')
138 <            for tmp in tmpBad:
139 <                tmp=string.strip(tmp)
140 <                CEBlackList.append(tmp)
141 <        except KeyError:
142 <            pass
143 <
144 <        self.reCEBlackList=[]
145 <        for bad in CEBlackList:
146 <            self.reCEBlackList.append(re.compile( bad ))
147 <
148 <        common.logger.debug(5,'CEBlackList: '+str(CEBlackList))
154 >        ## Events per job
155 >        if cfg_params.has_key('CMSSW.events_per_job'):
156 >            self.eventsPerJob =int( cfg_params['CMSSW.events_per_job'])
157 >            self.selectEventsPerJob = 1
158 >        else:
159 >            self.eventsPerJob = -1
160 >            self.selectEventsPerJob = 0
161  
162 <        CEWhiteList = []
163 <        try:
164 <            tmpGood = string.split(cfg_params['EDG.ce_white_list'],',')
165 <            #tmpGood = ['cern']
166 <            for tmp in tmpGood:
167 <                tmp=string.strip(tmp)
168 <                #if (tmp == 'cnaf'): tmp = 'webserver' ########## warning: temp. patch
157 <                CEWhiteList.append(tmp)
158 <        except KeyError:
159 <            pass
162 >        ## number of jobs
163 >        if cfg_params.has_key('CMSSW.number_of_jobs'):
164 >            self.theNumberOfJobs =int( cfg_params['CMSSW.number_of_jobs'])
165 >            self.selectNumberOfJobs = 1
166 >        else:
167 >            self.theNumberOfJobs = 0
168 >            self.selectNumberOfJobs = 0
169  
170 <        #print 'CEWhiteList: ',CEWhiteList
171 <        self.reCEWhiteList=[]
172 <        for Good in CEWhiteList:
173 <            self.reCEWhiteList.append(re.compile( Good ))
170 >        if cfg_params.has_key('CMSSW.total_number_of_events'):
171 >            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
172 >            self.selectTotalNumberEvents = 1
173 >            if self.selectNumberOfJobs  == 1:
174 >                if (self.total_number_of_events != -1) and int(self.total_number_of_events) < int(self.theNumberOfJobs):
175 >                    msg = 'Must specify at least one event per job. total_number_of_events > number_of_jobs '
176 >                    raise CrabException(msg)
177 >        else:
178 >            self.total_number_of_events = 0
179 >            self.selectTotalNumberEvents = 0
180  
181 <        common.logger.debug(5,'CEWhiteList: '+str(CEWhiteList))
181 >        if self.pset != None:
182 >             if ( (self.selectTotalNumberEvents + self.selectEventsPerJob + self.selectNumberOfJobs) != 2 ):
183 >                 msg = 'Must define exactly two of total_number_of_events, events_per_job, or number_of_jobs.'
184 >                 raise CrabException(msg)
185 >        else:
186 >             if (self.selectNumberOfJobs == 0):
187 >                 msg = 'Must specify  number_of_jobs.'
188 >                 raise CrabException(msg)
189 >
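
When a PSet is used, the check above requires exactly two of total_number_of_events, events_per_job and number_of_jobs to be set (and only number_of_jobs when running a bare script); the same rule as a standalone sketch:

    def check_splitting_params(total_events, events_per_job, number_of_jobs, have_pset=True):
        # arguments are None when the corresponding cfg key is absent
        n_given = sum(1 for v in (total_events, events_per_job, number_of_jobs) if v is not None)
        if have_pset and n_given != 2:
            raise ValueError('Must define exactly two of total_number_of_events, '
                             'events_per_job, or number_of_jobs.')
        if not have_pset and number_of_jobs is None:
            raise ValueError('Must specify number_of_jobs.')

    check_splitting_params(1000, 100, None)     # OK: two of the three are given
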
190 >        ## New method of dealing with seeds
191 >        self.incrementSeeds = []
192 >        self.preserveSeeds = []
193 >        if cfg_params.has_key('CMSSW.preserve_seeds'):
194 >            tmpList = cfg_params['CMSSW.preserve_seeds'].split(',')
195 >            for tmp in tmpList:
196 >                tmp.strip()
197 >                self.preserveSeeds.append(tmp)
198 >        if cfg_params.has_key('CMSSW.increment_seeds'):
199 >            tmpList = cfg_params['CMSSW.increment_seeds'].split(',')
200 >            for tmp in tmpList:
201 >                tmp.strip()
202 >                self.incrementSeeds.append(tmp)
203 >
204 >        ## Old method of dealing with seeds
205 >        ## FUTURE: This is for old CMSSW and old CRAB. Can throw exceptions after a couple of CRAB releases and then
206 >        ## remove
207 >        self.sourceSeed = cfg_params.get('CMSSW.pythia_seed',None)
208 >        if self.sourceSeed:
209 >            print "pythia_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
210 >            self.incrementSeeds.append('sourceSeed')
211 >            self.incrementSeeds.append('theSource')
212 >
213 >        self.sourceSeedVtx = cfg_params.get('CMSSW.vtx_seed',None)
214 >        if self.sourceSeedVtx:
215 >            print "vtx_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
216 >            self.incrementSeeds.append('VtxSmeared')
217 >
218 >        self.sourceSeedG4 = cfg_params.get('CMSSW.g4_seed',None)
219 >        if self.sourceSeedG4:
220 >            print "g4_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
221 >            self.incrementSeeds.append('g4SimHits')
222 >
223 >        self.sourceSeedMix = cfg_params.get('CMSSW.mix_seed',None)
224 >        if self.sourceSeedMix:
225 >            print "mix_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
226 >            self.incrementSeeds.append('mix')
227 >
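
The deprecated seed options are simply translated into increment_seeds entries for fixed module labels (the labels come from the assignments above); a table-driven sketch of that mapping:

    # deprecated cfg key -> module label(s) whose seed gets incremented
    DEPRECATED_SEEDS = {
        'CMSSW.pythia_seed': ['sourceSeed', 'theSource'],
        'CMSSW.vtx_seed':    ['VtxSmeared'],
        'CMSSW.g4_seed':     ['g4SimHits'],
        'CMSSW.mix_seed':    ['mix'],
    }

    def collect_increment_seeds(cfg_params):
        increment = []
        for key, labels in DEPRECATED_SEEDS.items():
            if cfg_params.get(key) is not None:
                print(key + " is deprecated; use preserve_seeds or increment_seeds instead.")
                increment.extend(labels)
        return increment
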
228 >        self.firstRun = cfg_params.get('CMSSW.first_run',None)
229 >
230 >
231 >        # Copy/return
232 >        self.copy_data = int(cfg_params.get('USER.copy_data',0))
233 >        self.return_data = int(cfg_params.get('USER.return_data',0))
234  
235          #DBSDLS-start
236 <        ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
236 >        ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
237          self.maxEvents=0  # max events available   ( --> check the requested nb. of evts in Creator.py)
238          self.DBSPaths={}  # all dbs paths requested ( --> input to the site local discovery script)
239 +        self.jobDestination=[]  # Site destination(s) for each job (list of lists)
240          ## Perform the data location and discovery (based on DBS/DLS)
241 <        self.DataDiscoveryAndLocation(cfg_params)
242 <        #DBSDLS-end          
241 >        ## SL: Don't if NONE is specified as input (pythia use case)
242 >        blockSites = {}
243 >        if self.datasetPath:
244 >            blockSites = self.DataDiscoveryAndLocation(cfg_params)
245 >        #DBSDLS-end
246 >
247 >        ## Select Splitting
248 >        if self.selectNoInput:
249 >            if self.pset == None:
250 >                self.jobSplittingForScript()
251 >            else:
252 >                self.jobSplittingNoInput()
253 >        else:
254 >            self.jobSplittingByBlocks(blockSites)
255  
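
The splitting dispatch introduced here boils down to three cases: no input dataset and no PSet means a user script (jobSplittingForScript), no input dataset with a PSet means generation-style splitting (jobSplittingNoInput), and everything else is split by data blocks (jobSplittingByBlocks). A schematic sketch of that decision:

    def choose_splitting(select_no_input, pset):
        # returns the name of the splitting method the constructor would call
        if select_no_input:
            if pset is None:
                return 'jobSplittingForScript'
            return 'jobSplittingNoInput'
        return 'jobSplittingByBlocks'
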
256 <        self.tgzNameWithPath = self.getTarBall(self.executable)
256 >        # modify Pset only the first time
257 >        if isNew:
258 >            if self.pset != None:
259 >                import PsetManipulator as pp
260 >                PsetEdit = pp.PsetManipulator(self.pset)
261 >                try:
262 >                    # Add FrameworkJobReport to parameter-set, set max events.
263 >                    # Reset later for data jobs by writeCFG which does all modifications
264 >                    PsetEdit.addCrabFJR(self.fjrFileName) # FUTURE: Job report addition not needed by CMSSW>1.5
265 >                    PsetEdit.maxEvent(self.eventsPerJob)
266 >                    PsetEdit.psetWriter(self.configFilename())
267 >                    ## If present, add TFileService to output files
268 >                    if not int(cfg_params.get('CMSSW.skip_TFileService_output',0)):
269 >                        tfsOutput = PsetEdit.getTFileService()
270 >                        if tfsOutput:
271 >                            if tfsOutput in self.output_file:
272 >                                common.logger.debug(5,"Output from TFileService "+tfsOutput+" already in output files")
273 >                            else:
274 >                                outfileflag = True #output found
275 >                                self.output_file.append(tfsOutput)
276 >                                common.logger.message("Adding "+tfsOutput+" to output files (from TFileService)")
277 >                            pass
278 >                        pass
279 >                    ## If present and requested, add PoolOutputModule to output files
280 >                    if int(cfg_params.get('CMSSW.get_edm_output',0)):
281 >                        edmOutput = PsetEdit.getPoolOutputModule()
282 >                        if edmOutput:
283 >                            if edmOutput in self.output_file:
284 >                                common.logger.debug(5,"Output from PoolOutputModule "+edmOutput+" already in output files")
285 >                            else:
286 >                                self.output_file.append(edmOutput)
287 >                                common.logger.message("Adding "+edmOutput+" to output files (from PoolOutputModule)")
288 >                            pass
289 >                        pass
290 >                except CrabException:
291 >                    msg='Error while manipulating ParameterSet: exiting...'
292 >                    raise CrabException(msg)
293 >            ## Prepare inputSandbox TarBall (only the first time)  
294 >            self.tgzNameWithPath = self.getTarBall(self.executable)
295  
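
The isNew branch above edits the PSet only once and then folds any TFileService or PoolOutputModule file names into the output list, unless they are already there; a hedged sketch of that merge, where pset_edit stands in for the PsetManipulator instance and is assumed to expose the getTFileService()/getPoolOutputModule() calls used above:

    def collect_auto_outputs(pset_edit, output_files, skip_tfs=False, get_edm=False):
        # returns output_files extended with any automatically discovered outputs
        if not skip_tfs:
            tfs = pset_edit.getTFileService()
            if tfs and tfs not in output_files:
                output_files.append(tfs)
        if get_edm:
            edm = pset_edit.getPoolOutputModule()
            if edm and edm not in output_files:
                output_files.append(edm)
        return output_files
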
296      def DataDiscoveryAndLocation(self, cfg_params):
297  
298 <        fun = "CMSSW::DataDiscoveryAndLocation()"
298 >        import DataDiscovery
299 >        import DataLocation
300 >        common.logger.debug(10,"CMSSW::DataDiscoveryAndLocation()")
301 >
302 >        datasetPath=self.datasetPath
303  
304          ## Contact the DBS
305 +        common.logger.message("Contacting Data Discovery Services ...")
306          try:
307 <            self.pubdata=DataDiscovery.DataDiscovery(self.owner,
185 <                                                     self.dataset,
186 <                                                     self.dataTiers,
187 <                                                     cfg_params)
307 >            self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params,self.skip_blocks)
308              self.pubdata.fetchDBSInfo()
309  
310          except DataDiscovery.NotExistingDatasetError, ex :
311              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
312              raise CrabException(msg)
193
313          except DataDiscovery.NoDataTierinProvenanceError, ex :
314              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
315              raise CrabException(msg)
316          except DataDiscovery.DataDiscoveryError, ex:
317 <            msg = 'ERROR ***: failed Data Discovery in DBS  %s'%ex.getErrorMessage()
317 >            msg = 'ERROR ***: failed Data Discovery in DBS :  %s'%ex.getErrorMessage()
318              raise CrabException(msg)
319  
320 <        ## get list of all required data in the form of dbs paths  (dbs path = /dataset/datatier/owner)
321 <        self.DBSPaths=self.pubdata.getDBSPaths()
322 <        common.logger.message("Required data are : ")
323 <        for path in self.DBSPaths:
205 <            common.logger.message(" --> "+path )
320 >        self.filesbyblock=self.pubdata.getFiles()
321 >        self.eventsbyblock=self.pubdata.getEventsPerBlock()
322 >        self.eventsbyfile=self.pubdata.getEventsPerFile()
323 >        self.parentFiles=self.pubdata.getParent()
324  
325          ## get max number of events
326 <        common.logger.debug(10,"number of events for primary fileblocks %i"%self.pubdata.getMaxEvents())
209 <        self.maxEvents=self.pubdata.getMaxEvents() ##  self.maxEvents used in Creator.py
210 <        common.logger.message("\nThe number of available events is %s"%self.maxEvents)
211 <
212 <        ## get fileblocks corresponding to the required data
213 <        fb=self.pubdata.getFileBlocks()
214 <        common.logger.debug(5,"fileblocks are %s"%fb)
326 >        self.maxEvents=self.pubdata.getMaxEvents()
327  
328          ## Contact the DLS and build a list of sites hosting the fileblocks
329          try:
330 <            dataloc=DataLocation.DataLocation(self.pubdata.getFileBlocks(),cfg_params)
330 >            dataloc=DataLocation.DataLocation(self.filesbyblock.keys(),cfg_params)
331              dataloc.fetchDLSInfo()
332          except DataLocation.DataLocationError , ex:
333              msg = 'ERROR ***: failed Data Location in DLS \n %s '%ex.getErrorMessage()
334              raise CrabException(msg)
223        
224        allsites=dataloc.getSites()
225        common.logger.debug(5,"sites are %s"%allsites)
226        sites=self.checkBlackList(allsites)
227        common.logger.debug(5,"sites are (after black list) %s"%sites)
228        sites=self.checkWhiteList(sites)
229        common.logger.debug(5,"sites are (after white list) %s"%sites)
335  
336 <        if len(sites)==0:
337 <            msg = 'No sites hosting all the needed data! Exiting... '
338 <            raise CrabException(msg)
339 <        common.logger.message("List of Sites hosting the data : "+str(sites))
340 <        common.logger.debug(6, "List of Sites: "+str(sites))
341 <        common.analisys_common_info['sites']=sites    ## used in SchedulerEdg.py in createSchScript
342 <        return
343 <        
344 <    def checkBlackList(self, allSites):
345 <        if len(self.reCEBlackList)==0: return allSites
346 <        sites = []
347 <        for site in allSites:
243 <            common.logger.debug(10,'Site '+site)
244 <            good=1
245 <            for re in self.reCEBlackList:
246 <                if re.search(site):
247 <                    common.logger.message('CE in black list, skipping site '+site)
248 <                    good=0
249 <                pass
250 <            if good: sites.append(site)
251 <        if len(sites) == 0:
252 <            common.logger.debug(3,"No sites found after BlackList")
336 >
337 >        sites = dataloc.getSites()
338 >        allSites = []
339 >        listSites = sites.values()
340 >        for listSite in listSites:
341 >            for oneSite in listSite:
342 >                allSites.append(oneSite)
343 >        allSites = self.uniquelist(allSites)
344 >
345 >        # screen output
346 >        common.logger.message("Requested dataset: " + datasetPath + " has " + str(self.maxEvents) + " events in " + str(len(self.filesbyblock.keys())) + " blocks.\n")
347 >
348          return sites
349  
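
DataDiscoveryAndLocation now returns the block-to-sites dictionary from DLS; the intermediate flattening into a unique list of all sites (done with uniquelist above) is equivalent to:

    def all_sites(block_sites):
        # block_sites: {block name: [site, site, ...]} from the DLS lookup
        unique = []
        for site_list in block_sites.values():
            for site in site_list:
                if site not in unique:
                    unique.append(site)
        return unique

    # e.g. all_sites({'/A/B#1': ['T2_IT_Bari', 'T2_DE_DESY'], '/A/B#2': ['T2_IT_Bari']})
    # -> ['T2_IT_Bari', 'T2_DE_DESY']   (order follows dict iteration; names are illustrative)
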
350 <    def checkWhiteList(self, allsites):
350 >    def jobSplittingByBlocks(self, blockSites):
351 >        """
352 >        Perform job splitting. Jobs run over an integer number of files
353 >        and no more than one block.
354 >        ARGUMENT: blockSites: dictionary with blocks as keys and list of host sites as values
 355 >        REQUIRES: self.selectTotalNumberEvents, self.selectEventsPerJob, self.selectNumberOfJobs,
356 >                  self.total_number_of_events, self.eventsPerJob, self.theNumberOfJobs,
357 >                  self.maxEvents, self.filesbyblock
358 >        SETS: self.jobDestination - Site destination(s) for each job (a list of lists)
359 >              self.total_number_of_jobs - Total # of jobs
360 >              self.list_of_args - File(s) job will run on (a list of lists)
361 >        """
362  
363 <        if len(self.reCEWhiteList)==0: return pubDBUrls
364 <        sites = []
365 <        for site in allsites:
366 <            #print 'connecting to the URL ',url
367 <            good=0
368 <            for re in self.reCEWhiteList:
369 <                if re.search(site):
370 <                    common.logger.debug(5,'CE in white list, adding site '+site)
371 <                    good=1
372 <                if not good: continue
373 <                sites.append(site)
374 <        if len(sites) == 0:
375 <            common.logger.message("No sites found after WhiteList\n")
363 >        # ---- Handle the possible job splitting configurations ---- #
364 >        if (self.selectTotalNumberEvents):
365 >            totalEventsRequested = self.total_number_of_events
366 >        if (self.selectEventsPerJob):
367 >            eventsPerJobRequested = self.eventsPerJob
368 >            if (self.selectNumberOfJobs):
369 >                totalEventsRequested = self.theNumberOfJobs * self.eventsPerJob
370 >
371 >        # If user requested all the events in the dataset
372 >        if (totalEventsRequested == -1):
373 >            eventsRemaining=self.maxEvents
374 >        # If user requested more events than are in the dataset
375 >        elif (totalEventsRequested > self.maxEvents):
376 >            eventsRemaining = self.maxEvents
377 >            common.logger.message("Requested "+str(self.total_number_of_events)+ " events, but only "+str(self.maxEvents)+" events are available.")
378 >        # If user requested less events than are in the dataset
379          else:
380 <            common.logger.debug(5,"Selected sites via WhiteList are "+str(sites)+"\n")
381 <        return sites
380 >            eventsRemaining = totalEventsRequested
381 >
382 >        # If user requested more events per job than are in the dataset
383 >        if (self.selectEventsPerJob and eventsPerJobRequested > self.maxEvents):
384 >            eventsPerJobRequested = self.maxEvents
385 >
386 >        # For user info at end
387 >        totalEventCount = 0
388 >
389 >        if (self.selectTotalNumberEvents and self.selectNumberOfJobs):
390 >            eventsPerJobRequested = int(eventsRemaining/self.theNumberOfJobs)
391 >
392 >        if (self.selectNumberOfJobs):
393 >            common.logger.message("May not create the exact number_of_jobs requested.")
394 >
395 >        if ( self.ncjobs == 'all' ) :
396 >            totalNumberOfJobs = 999999999
397 >        else :
398 >            totalNumberOfJobs = self.ncjobs
399 >
400 >        blocks = blockSites.keys()
401 >        blockCount = 0
402 >        # Backup variable in case self.maxEvents counted events in a non-included block
403 >        numBlocksInDataset = len(blocks)
404 >
405 >        jobCount = 0
406 >        list_of_lists = []
407 >
 408 >        # list tracking which jobs belong to which block
409 >        jobsOfBlock = {}
410 >
411 >        # ---- Iterate over the blocks in the dataset until ---- #
412 >        # ---- we've met the requested total # of events    ---- #
413 >        while ( (eventsRemaining > 0) and (blockCount < numBlocksInDataset) and (jobCount < totalNumberOfJobs)):
414 >            block = blocks[blockCount]
415 >            blockCount += 1
416 >            if block not in jobsOfBlock.keys() :
417 >                jobsOfBlock[block] = []
418 >
419 >            if self.eventsbyblock.has_key(block) :
420 >                numEventsInBlock = self.eventsbyblock[block]
421 >                common.logger.debug(5,'Events in Block File '+str(numEventsInBlock))
422 >
423 >                files = self.filesbyblock[block]
424 >                numFilesInBlock = len(files)
425 >                if (numFilesInBlock <= 0):
426 >                    continue
427 >                fileCount = 0
428 >
429 >                # ---- New block => New job ---- #
430 >                parString = ""
431 >                # counter for number of events in files currently worked on
432 >                filesEventCount = 0
433 >                # flag if next while loop should touch new file
434 >                newFile = 1
435 >                # job event counter
436 >                jobSkipEventCount = 0
437 >
438 >                # ---- Iterate over the files in the block until we've met the requested ---- #
439 >                # ---- total # of events or we've gone over all the files in this block  ---- #
440 >                pString=''
441 >                while ( (eventsRemaining > 0) and (fileCount < numFilesInBlock) and (jobCount < totalNumberOfJobs) ):
442 >                    file = files[fileCount]
443 >                    if self.useParent:
444 >                        parent = self.parentFiles[file]
445 >                        for f in parent :
446 >                            pString += '\\\"' + f + '\\\"\,'
447 >                        common.logger.debug(6, "File "+str(file)+" has the following parents: "+str(parent))
448 >                        common.logger.write("File "+str(file)+" has the following parents: "+str(parent))
449 >                    if newFile :
450 >                        try:
451 >                            numEventsInFile = self.eventsbyfile[file]
452 >                            common.logger.debug(6, "File "+str(file)+" has "+str(numEventsInFile)+" events")
453 >                            # increase filesEventCount
454 >                            filesEventCount += numEventsInFile
455 >                            # Add file to current job
456 >                            parString += '\\\"' + file + '\\\"\,'
457 >                            newFile = 0
458 >                        except KeyError:
459 >                            common.logger.message("File "+str(file)+" has unknown number of events: skipping")
460 >
461 >                    eventsPerJobRequested = min(eventsPerJobRequested, eventsRemaining)
462 >                    # if less events in file remain than eventsPerJobRequested
463 >                    if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested):
464 >                        # if last file in block
465 >                        if ( fileCount == numFilesInBlock-1 ) :
466 >                            # end job using last file, use remaining events in block
467 >                            # close job and touch new file
468 >                            fullString = parString[:-2]
469 >                            if self.useParent:
470 >                                fullParentString = pString[:-2]
471 >                                list_of_lists.append([fullString,fullParentString,str(-1),str(jobSkipEventCount)])
472 >                            else:
473 >                                list_of_lists.append([fullString,str(-1),str(jobSkipEventCount)])
474 >                            common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(filesEventCount - jobSkipEventCount)+" events (last file in block).")
475 >                            self.jobDestination.append(blockSites[block])
476 >                            common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
477 >                            # fill jobs of block dictionary
478 >                            jobsOfBlock[block].append(jobCount+1)
479 >                            # reset counter
480 >                            jobCount = jobCount + 1
481 >                            totalEventCount = totalEventCount + filesEventCount - jobSkipEventCount
482 >                            eventsRemaining = eventsRemaining - filesEventCount + jobSkipEventCount
483 >                            jobSkipEventCount = 0
484 >                            # reset file
485 >                            pString = ""
486 >                            parString = ""
487 >                            filesEventCount = 0
488 >                            newFile = 1
489 >                            fileCount += 1
490 >                        else :
491 >                            # go to next file
492 >                            newFile = 1
493 >                            fileCount += 1
494 >                    # if events in file equal to eventsPerJobRequested
495 >                    elif ( filesEventCount - jobSkipEventCount == eventsPerJobRequested ) :
496 >                        # close job and touch new file
497 >                        fullString = parString[:-2]
498 >                        if self.useParent:
499 >                            fullParentString = pString[:-2]
500 >                            list_of_lists.append([fullString,fullParentString,str(eventsPerJobRequested),str(jobSkipEventCount)])
501 >                        else:
502 >                            list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
503 >                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
504 >                        self.jobDestination.append(blockSites[block])
505 >                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
506 >                        jobsOfBlock[block].append(jobCount+1)
507 >                        # reset counter
508 >                        jobCount = jobCount + 1
509 >                        totalEventCount = totalEventCount + eventsPerJobRequested
510 >                        eventsRemaining = eventsRemaining - eventsPerJobRequested
511 >                        jobSkipEventCount = 0
512 >                        # reset file
513 >                        pString = ""
514 >                        parString = ""
515 >                        filesEventCount = 0
516 >                        newFile = 1
517 >                        fileCount += 1
518 >
519 >                    # if more events in file remain than eventsPerJobRequested
520 >                    else :
521 >                        # close job but don't touch new file
522 >                        fullString = parString[:-2]
523 >                        if self.useParent:
524 >                            fullParentString = pString[:-2]
525 >                            list_of_lists.append([fullString,fullParentString,str(eventsPerJobRequested),str(jobSkipEventCount)])
526 >                        else:
527 >                            list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
528 >                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
529 >                        self.jobDestination.append(blockSites[block])
530 >                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
531 >                        jobsOfBlock[block].append(jobCount+1)
532 >                        # increase counter
533 >                        jobCount = jobCount + 1
534 >                        totalEventCount = totalEventCount + eventsPerJobRequested
535 >                        eventsRemaining = eventsRemaining - eventsPerJobRequested
536 >                        # calculate skip events for last file
537 >                        # use filesEventCount (contains several files), jobSkipEventCount and eventsPerJobRequest
538 >                        jobSkipEventCount = eventsPerJobRequested - (filesEventCount - jobSkipEventCount - self.eventsbyfile[file])
539 >                        # remove all but the last file
540 >                        filesEventCount = self.eventsbyfile[file]
541 >                        if self.useParent:
542 >                            for f in parent : pString += '\\\"' + f + '\\\"\,'
543 >                        parString = '\\\"' + file + '\\\"\,'
544 >                    pass # END if
545 >                pass # END while (iterate over files in the block)
546 >        pass # END while (iterate over blocks in the dataset)
547 >        self.ncjobs = self.total_number_of_jobs = jobCount
548 >        if (eventsRemaining > 0 and jobCount < totalNumberOfJobs ):
 549 >            common.logger.message("Could not run on all requested events because some blocks are not hosted at allowed sites.")
550 >        common.logger.message(str(jobCount)+" job(s) can run on "+str(totalEventCount)+" events.\n")
551 >
552 >        # screen output
553 >        screenOutput = "List of jobs and available destination sites:\n\n"
554 >
 555 >        # keep track of blocks with no sites, to print a warning at the end
556 >        noSiteBlock = []
557 >        bloskNoSite = []
558 >
559 >        blockCounter = 0
560 >        for block in blocks:
561 >            if block in jobsOfBlock.keys() :
562 >                blockCounter += 1
563 >                screenOutput += "Block %5i: jobs %20s: sites: %s\n" % (blockCounter,spanRanges(jobsOfBlock[block]),
564 >                    ','.join(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)))
565 >                if len(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)) == 0:
566 >                    noSiteBlock.append( spanRanges(jobsOfBlock[block]) )
567 >                    bloskNoSite.append( blockCounter )
568 >
569 >        common.logger.message(screenOutput)
570 >        if len(noSiteBlock) > 0 and len(bloskNoSite) > 0:
571 >            msg = 'WARNING: No sites are hosting any part of data for block:\n                '
572 >            virgola = ""
573 >            if len(bloskNoSite) > 1:
574 >                virgola = ","
575 >            for block in bloskNoSite:
576 >                msg += ' ' + str(block) + virgola
577 >            msg += '\n               Related jobs:\n                 '
578 >            virgola = ""
579 >            if len(noSiteBlock) > 1:
580 >                virgola = ","
581 >            for range_jobs in noSiteBlock:
582 >                msg += str(range_jobs) + virgola
 583 >            msg += '\n               will not be submitted and this block of data cannot be analyzed!\n'
584 >            if self.cfg_params.has_key('EDG.se_white_list'):
585 >                msg += 'WARNING: SE White List: '+self.cfg_params['EDG.se_white_list']+'\n'
586 >                msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
587 >                msg += 'Please check if the dataset is available at this site!)\n'
588 >            if self.cfg_params.has_key('EDG.ce_white_list'):
589 >                msg += 'WARNING: CE White List: '+self.cfg_params['EDG.ce_white_list']+'\n'
590 >                msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
591 >                msg += 'Please check if the dataset is available at this site!)\n'
592 >
593 >            common.logger.message(msg)
594 >
595 >        self.list_of_args = list_of_lists
596 >        return
597 >
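
The block-splitting loop above packs whole files into jobs: each job gets a file list, the number of events to process and the number of events to skip in its first file, with -1 meaning "use whatever is left in the block". A much-simplified sketch of the per-block packing (ignoring parent files, site lists, unknown file sizes and the global job/event limits):

    def split_block(files_events, events_per_job):
        # files_events: ordered list of (file name, number of events) for one block
        jobs = []                        # each entry: (files, max events, skip events)
        cur_files, cur_events, skip = [], 0, 0
        for i, (fname, nev) in enumerate(files_events):
            cur_files.append(fname)
            cur_events += nev
            while cur_events - skip >= events_per_job:
                jobs.append((list(cur_files), events_per_job, skip))
                if cur_events - skip == events_per_job:
                    cur_files, cur_events, skip = [], 0, 0
                    break
                # the last file still has unused events: keep only that file and
                # skip the events already assigned to the job just closed
                skip = events_per_job - (cur_events - skip - nev)
                cur_files, cur_events = [fname], nev
            if i == len(files_events) - 1 and cur_events - skip > 0:
                jobs.append((list(cur_files), -1, skip))     # -1: use remaining events
        return jobs

    # e.g. split_block([('f1.root', 300), ('f2.root', 200)], 250)
    # -> [(['f1.root'], 250, 0), (['f1.root', 'f2.root'], 250, 250)]
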
598 >    def jobSplittingNoInput(self):
599 >        """
600 >        Perform job splitting based on number of event per job
601 >        """
602 >        common.logger.debug(5,'Splitting per events')
603 >
604 >        if (self.selectEventsPerJob):
605 >            common.logger.message('Required '+str(self.eventsPerJob)+' events per job ')
606 >        if (self.selectNumberOfJobs):
607 >            common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
608 >        if (self.selectTotalNumberEvents):
609 >            common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
610 >
611 >        if (self.total_number_of_events < 0):
612 >            msg='Cannot split jobs per Events with "-1" as total number of events'
613 >            raise CrabException(msg)
614 >
615 >        if (self.selectEventsPerJob):
616 >            if (self.selectTotalNumberEvents):
617 >                self.total_number_of_jobs = int(self.total_number_of_events/self.eventsPerJob)
618 >            elif(self.selectNumberOfJobs) :
619 >                self.total_number_of_jobs =self.theNumberOfJobs
620 >                self.total_number_of_events =int(self.theNumberOfJobs*self.eventsPerJob)
621 >
622 >        elif (self.selectNumberOfJobs) :
623 >            self.total_number_of_jobs = self.theNumberOfJobs
624 >            self.eventsPerJob = int(self.total_number_of_events/self.total_number_of_jobs)
625 >
626 >        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
627 >
628 >        # is there any remainder?
629 >        check = int(self.total_number_of_events) - (int(self.total_number_of_jobs)*self.eventsPerJob)
630 >
631 >        common.logger.debug(5,'Check  '+str(check))
632 >
 633 >        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created, each for '+str(self.eventsPerJob)+' events, for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
634 >        if check > 0:
635 >            common.logger.message('Warning: asked '+str(self.total_number_of_events)+' but can do only '+str(int(self.total_number_of_jobs)*self.eventsPerJob))
636 >
637 >        # argument is seed number.$i
638 >        self.list_of_args = []
639 >        for i in range(self.total_number_of_jobs):
640 >            ## Since there is no input, any site is good
641 >            self.jobDestination.append([""]) #must be empty to write correctly the xml
642 >            args=[]
643 >            if (self.firstRun):
644 >                ## pythia first run
645 >                args.append(str(self.firstRun)+str(i))
646 >            self.list_of_args.append(args)
647 >
648 >        return
649 >
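
When events_per_job and total_number_of_events are both given, jobSplittingNoInput reduces to an integer division plus a warning about the remainder; a minimal sketch:

    def split_no_input(total_events, events_per_job):
        # returns (number of jobs, events actually covered)
        n_jobs = int(total_events / events_per_job)
        covered = n_jobs * events_per_job
        if total_events - covered > 0:
            print('Warning: asked %d but can do only %d' % (total_events, covered))
        return n_jobs, covered

    split_no_input(1050, 100)    # -> (10, 1000), plus a warning about the 50 leftover events
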
650 >
651 >    def jobSplittingForScript(self):
652 >        """
653 >        Perform job splitting based on number of job
654 >        """
655 >        common.logger.debug(5,'Splitting per job')
656 >        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
657 >
658 >        self.total_number_of_jobs = self.theNumberOfJobs
659 >
660 >        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
661 >
662 >        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created')
663 >
664 >        # argument is seed number.$i
665 >        self.list_of_args = []
666 >        for i in range(self.total_number_of_jobs):
667 >            self.jobDestination.append([""])
668 >            self.list_of_args.append([str(i)])
669 >        return
670 >
671 >    def split(self, jobParams,firstJobID):
672 >
673 >        njobs = self.total_number_of_jobs
674 >        arglist = self.list_of_args
675 >        # create the empty structure
676 >        for i in range(njobs):
677 >            jobParams.append("")
678 >
679 >        listID=[]
680 >        listField=[]
681 >        for id in range(njobs):
682 >            job = id + int(firstJobID)
683 >            jobParams[id] = arglist[id]
684 >            listID.append(job+1)
685 >            job_ToSave ={}
686 >            concString = ' '
687 >            argu=''
688 >            if len(jobParams[id]):
689 >                argu +=   concString.join(jobParams[id] )
690 >            job_ToSave['arguments']= str(job+1)+' '+argu
691 >            job_ToSave['dlsDestination']= self.jobDestination[id]
692 >            listField.append(job_ToSave)
693 >            msg="Job "+str(job)+" Arguments:   "+str(job+1)+" "+argu+"\n"  \
694 >            +"                     Destination: "+str(self.jobDestination[id])
695 >            common.logger.debug(5,msg)
696 >        common._db.updateJob_(listID,listField)
697 >        self.argsList = (len(jobParams[0])+1)
698 >
699 >        return
700 >
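
split() stores, per job, a space-separated argument string whose first token is the 1-based job number followed by the splitting arguments, together with the DLS destination; one such record could be built as in this sketch (field names taken from the code above, example values hypothetical):

    def job_record(job_index, first_job_id, args, destination):
        # args: the per-job entry from list_of_args; destination: list of candidate sites
        job_number = job_index + int(first_job_id) + 1      # jobs are numbered from 1
        return {
            'arguments': str(job_number) + ' ' + ' '.join(args),
            'dlsDestination': destination,
        }

    # e.g. job_record(0, 0, ['someFiles', '100', '0'], ['T2_XX_Site'])
    # -> {'arguments': '1 someFiles 100 0', 'dlsDestination': ['T2_XX_Site']}
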
701 >    def numberOfJobs(self):
702 >        return self.total_number_of_jobs
703  
704      def getTarBall(self, exe):
705          """
706          Return the TarBall with lib and exe
707          """
708 <        
279 <        # if it exist, just return it
280 <        self.tgzNameWithPath = common.work_space.shareDir()+self.tgz_name
708 >        self.tgzNameWithPath = common.work_space.pathForTgz()+'share/'+self.tgz_name
709          if os.path.exists(self.tgzNameWithPath):
710              return self.tgzNameWithPath
711  
# Line 290 | Line 718 | class Cmssw(JobType):
718  
719          # First of all declare the user Scram area
720          swArea = self.scram.getSWArea_()
293        #print "swArea = ", swArea
294        swVersion = self.scram.getSWVersion()
295        #print "swVersion = ", swVersion
721          swReleaseTop = self.scram.getReleaseTop_()
722 <        #print "swReleaseTop = ", swReleaseTop
298 <        
722 >
723          ## check if working area is release top
724          if swReleaseTop == '' or swArea == swReleaseTop:
725 +            common.logger.debug(3,"swArea = "+swArea+" swReleaseTop ="+swReleaseTop)
726              return
727  
728 <        filesToBeTarred = []
729 <        ## First find the executable
730 <        if (self.executable != ''):
731 <            exeWithPath = self.scram.findFile_(executable)
732 < #           print exeWithPath
733 <            if ( not exeWithPath ):
734 <                raise CrabException('User executable '+executable+' not found')
735 <
736 <            ## then check if it's private or not
737 <            if exeWithPath.find(swReleaseTop) == -1:
738 <                # the exe is private, so we must ship
739 <                common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
740 <                path = swArea+'/'
741 <                exe = string.replace(exeWithPath, path,'')
742 <                filesToBeTarred.append(exe)
743 <                pass
744 <            else:
745 <                # the exe is from release, we'll find it on WN
728 >        import tarfile
729 >        try: # create tar ball
730 >            tar = tarfile.open(self.tgzNameWithPath, "w:gz")
731 >            ## First find the executable
732 >            if (self.executable != ''):
733 >                exeWithPath = self.scram.findFile_(executable)
734 >                if ( not exeWithPath ):
735 >                    raise CrabException('User executable '+executable+' not found')
736 >
737 >                ## then check if it's private or not
738 >                if exeWithPath.find(swReleaseTop) == -1:
739 >                    # the exe is private, so we must ship
740 >                    common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
741 >                    path = swArea+'/'
742 >                    # distinguish case when script is in user project area or given by full path somewhere else
743 >                    if exeWithPath.find(path) >= 0 :
744 >                        exe = string.replace(exeWithPath, path,'')
745 >                        tar.add(path+exe,exe)
746 >                    else :
747 >                        tar.add(exeWithPath,os.path.basename(executable))
748 >                    pass
749 >                else:
750 >                    # the exe is from release, we'll find it on WN
751 >                    pass
752 >
753 >            ## Now get the libraries: only those in local working area
754 >            libDir = 'lib'
755 >            lib = swArea+'/' +libDir
756 >            common.logger.debug(5,"lib "+lib+" to be tarred")
757 >            if os.path.exists(lib):
758 >                tar.add(lib,libDir)
759 >
760 >            ## Now check if module dir is present
761 >            moduleDir = 'module'
762 >            module = swArea + '/' + moduleDir
763 >            if os.path.isdir(module):
764 >                tar.add(module,moduleDir)
765 >
766 >            ## Now check if any data dir(s) is present
767 >            self.dataExist = False
768 >            todo_list = [(i, i) for i in  os.listdir(swArea+"/src")]
769 >            while len(todo_list):
770 >                entry, name = todo_list.pop()
771 >                if name.startswith('crab_0_') or  name.startswith('.') or name == 'CVS':
772 >                    continue
773 >                if os.path.isdir(swArea+"/src/"+entry):
774 >                    entryPath = entry + '/'
775 >                    todo_list += [(entryPath + i, i) for i in  os.listdir(swArea+"/src/"+entry)]
776 >                    if name == 'data':
777 >                        self.dataExist=True
778 >                        common.logger.debug(5,"data "+entry+" to be tarred")
779 >                        tar.add(swArea+"/src/"+entry,"src/"+entry)
780 >                    pass
781                  pass
782 <
783 <        ## Now get the libraries: only those in local working area
784 <        libDir = 'lib'
785 <        lib = swArea+'/' +libDir
786 <        common.logger.debug(5,"lib "+lib+" to be tarred")
787 <        if os.path.exists(lib):
788 <            filesToBeTarred.append(libDir)
789 <
790 <        ## Now check if the Data dir is present
791 <        dataDir = 'src/Data/'
792 <        if os.path.isdir(swArea+'/'+dataDir):
793 <            filesToBeTarred.append(dataDir)
794 <
795 <        ## Create the tar-ball
796 <        if len(filesToBeTarred)>0:
797 <            cwd = os.getcwd()
798 <            os.chdir(swArea)
799 <            tarcmd = 'tar zcvf ' + self.tgzNameWithPath + ' '
800 <            for line in filesToBeTarred:
801 <                tarcmd = tarcmd + line + ' '
802 <            cout = runCommand(tarcmd)
803 <            if not cout:
804 <                raise CrabException('Could not create tar-ball')
805 <            os.chdir(cwd)
806 <        else:
807 <            common.logger.debug(5,"No files to be to be tarred")
808 <        
809 <        return
810 <        
811 <    def wsSetupEnvironment(self, nj):
782 >
783 >            ### CMSSW ParameterSet
784 >            if not self.pset is None:
785 >                cfg_file = common.work_space.jobDir()+self.configFilename()
786 >                tar.add(cfg_file,self.configFilename())
787 >                common.logger.debug(5,"File added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
788 >
789 >
790 >            ## Add ProdCommon dir to tar
791 >            prodcommonDir = './'
792 >            prodcommonPath = os.environ['CRABDIR'] + '/' + 'external/'
793 >            neededStuff = ['ProdCommon/__init__.py','ProdCommon/FwkJobRep', 'ProdCommon/CMSConfigTools','ProdCommon/Core','ProdCommon/MCPayloads', 'IMProv']
794 >            for file in neededStuff:
795 >                tar.add(prodcommonPath+file,prodcommonDir+file)
796 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
797 >
798 >            ##### ML stuff
799 >            ML_file_list=['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py']
800 >            path=os.environ['CRABDIR'] + '/python/'
801 >            for file in ML_file_list:
802 >                tar.add(path+file,file)
803 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
804 >
805 >            ##### Utils
806 >            Utils_file_list=['parseCrabFjr.py','writeCfg.py', 'fillCrabFjr.py']
807 >            for file in Utils_file_list:
808 >                tar.add(path+file,file)
809 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
810 >
811 >            ##### AdditionalFiles
812 >            for file in self.additional_inbox_files:
813 >                tar.add(file,string.split(file,'/')[-1])
814 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
815 >
816 >            tar.close()
817 >        except IOError:
818 >            raise CrabException('Could not create tar-ball '+self.tgzNameWithPath)
819 >        except tarfile.TarError:
820 >            raise CrabException('Could not create tar-ball '+self.tgzNameWithPath)
821 >
822 >        ## check for tarball size
823 >        tarballinfo = os.stat(self.tgzNameWithPath)
824 >        if ( tarballinfo.st_size > self.MaxTarBallSize*1024*1024 ) :
825 >            raise CrabException('Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) + ' MB input sandbox limit and not supported by the used GRID submission system. Please make sure that no unnecessary files are in all data directories in your local CMSSW project area as they are automatically packed into the input sandbox.')
826 >
827 >        ## create tar-ball with ML stuff
828 >
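
The sandbox guard added at the end of getTarBall compares the tarball size on disk with EDG.maxtarballsize (in MB, 9.5 by default); the same check as a standalone sketch:

    import os

    def check_sandbox_size(tgz_path, max_mb=9.5):
        size_mb = os.stat(tgz_path).st_size / 1024.0 / 1024.0
        if size_mb > max_mb:
            raise RuntimeError('Input sandbox size of %.1f MB is larger than the '
                               'allowed %.1f MB limit' % (size_mb, max_mb))
        return size_mb
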
829 >    def wsSetupEnvironment(self, nj=0):
830          """
831          Returns part of a job script which prepares
832          the execution environment for the job 'nj'.
833          """
834 +        if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
835 +            psetName = 'pset.py'
836 +        else:
837 +            psetName = 'pset.cfg'
838          # Prepare JobType-independent part
839 <        txt = self.wsSetupCMSEnvironment_()
839 >        txt = '\n#Written by cms_cmssw::wsSetupEnvironment\n'
840 >        txt += 'echo ">>> setup environment"\n'
841 >        txt += 'if [ $middleware == LCG ]; then \n'
842 >        txt += self.wsSetupCMSLCGEnvironment_()
843 >        txt += 'elif [ $middleware == OSG ]; then\n'
844 >        txt += '    WORKING_DIR=`/bin/mktemp  -d $OSG_WN_TMP/cms_XXXXXXXXXXXX`\n'
845 >        txt += '    if [ ! $? == 0 ] ;then\n'
846 >        txt += '        echo "ERROR ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
847 >        txt += '        job_exit_code=10016\n'
848 >        txt += '        func_exit\n'
849 >        txt += '    fi\n'
850 >        txt += '    echo ">>> Created working directory: $WORKING_DIR"\n'
851 >        txt += '\n'
852 >        txt += '    echo "Change to working directory: $WORKING_DIR"\n'
853 >        txt += '    cd $WORKING_DIR\n'
854 >        txt += '    echo ">>> current directory (WORKING_DIR): $WORKING_DIR"\n'
855 >        txt += self.wsSetupCMSOSGEnvironment_()
856 >        txt += 'fi\n'
857  
858          # Prepare JobType-specific part
859          scram = self.scram.commandName()
860          txt += '\n\n'
861 <        txt += 'echo "### SPECIFIC JOB SETUP ENVIRONMENT ###"\n'
861 >        txt += 'echo ">>> specific cmssw setup environment:"\n'
862 >        txt += 'echo "CMSSW_VERSION =  '+self.version+'"\n'
863          txt += scram+' project CMSSW '+self.version+'\n'
864          txt += 'status=$?\n'
865          txt += 'if [ $status != 0 ] ; then\n'
866 <        txt += '   echo "SET_EXE_ENV 1 ==>ERROR CMSSW '+self.version+' not found on `hostname`" \n'
867 <        txt += '   echo "JOB_EXIT_STATUS = 5"\n'
868 <        txt += '   echo "SanityCheckCode = 5" | tee -a $RUNTIME_AREA/$repo\n'
369 <        txt += '   dumpStatus $RUNTIME_AREA/$repo\n'
370 <        txt += '   exit 5 \n'
866 >        txt += '    echo "ERROR ==> CMSSW '+self.version+' not found on `hostname`" \n'
867 >        txt += '    job_exit_code=10034\n'
868 >        txt += '    func_exit\n'
869          txt += 'fi \n'
372        txt += 'echo "CMSSW_VERSION =  '+self.version+'"\n'
870          txt += 'cd '+self.version+'\n'
871 <        ### needed grep for bug in scramv1 ###
871 >        txt += 'SOFTWARE_DIR=`pwd`\n'
872 >        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
873          txt += 'eval `'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME`\n'
874 <
874 >        txt += 'if [ $? != 0 ] ; then\n'
875 >        txt += '    echo "ERROR ==> Problem with the command: "\n'
876 >        txt += '    echo "eval \`'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME \` at `hostname`"\n'
877 >        txt += '    job_exit_code=10034\n'
878 >        txt += '    func_exit\n'
879 >        txt += 'fi \n'
880          # Handle the arguments:
881          txt += "\n"
882 <        txt += "## ARGUMNETS: $1 Job Number\n"
380 <        # txt += "## ARGUMNETS: $2 First Event for this job\n"
381 <        # txt += "## ARGUMNETS: $3 Max Event for this job\n"
882 >        txt += "## number of arguments (first argument always jobnumber)\n"
883          txt += "\n"
884 <        txt += "narg=$#\n"
384 <        txt += "if [ $narg -lt 1 ]\n"
884 >        txt += "if [ $nargs -lt "+str(self.argsList)+" ]\n"
885          txt += "then\n"
886 <        txt += "    echo 'SET_EXE_ENV 1 ==> ERROR Too few arguments' +$narg+ \n"
887 <        txt += '    echo "JOB_EXIT_STATUS = 1"\n'
888 <        txt += '    echo "SanityCheckCode = 1" | tee -a $RUNTIME_AREA/$repo\n'
389 <        txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
390 <        txt += "    exit 1\n"
886 >        txt += "    echo 'ERROR ==> Too few arguments' +$nargs+ \n"
887 >        txt += '    job_exit_code=50113\n'
888 >        txt += "    func_exit\n"
889          txt += "fi\n"
890          txt += "\n"
393        txt += "NJob=$1\n"
394        # txt += "FirstEvent=$2\n"
395        # txt += "MaxEvents=$3\n"
891  
892          # Prepare job-specific part
893          job = common.job_list[nj]
894 <        pset = os.path.basename(job.configFilename())
895 <        txt += '\n'
896 <        txt += 'cp $RUNTIME_AREA/'+pset+' pset.cfg\n'
402 <        # txt += 'if [ -e $RUNTIME_AREA/orcarc_$CE ] ; then\n'
403 <        # txt += '  cat $RUNTIME_AREA/orcarc_$CE .orcarc >> .orcarc_tmp\n'
404 <        # txt += '  mv .orcarc_tmp .orcarc\n'
405 <        # txt += 'fi\n'
406 <        # txt += 'if [ -e $RUNTIME_AREA/init_$CE.sh ] ; then\n'
407 <        # txt += '  cp $RUNTIME_AREA/init_$CE.sh init.sh\n'
408 <        # txt += 'fi\n'
894 >        if (self.datasetPath):
895 >            txt += '\n'
896 >            txt += 'DatasetPath='+self.datasetPath+'\n'
897  
898 <        if len(self.additional_inbox_files) > 0:
411 <            for file in self.additional_inbox_files:
412 <                txt += 'if [ -e $RUNTIME_AREA/'+file+' ] ; then\n'
413 <                txt += '   cp $RUNTIME_AREA/'+file+' .\n'
414 <                txt += '   chmod +x '+file+'\n'
415 <                txt += 'fi\n'
416 <            pass
417 <
418 <        # txt += '\n'
419 <        # txt += 'chmod +x ./init.sh\n'
420 <        # txt += './init.sh\n'
421 <        # txt += 'exitStatus=$?\n'
422 <        # txt += 'if [ $exitStatus != 0 ] ; then\n'
423 <        # txt += '  echo "SET_EXE_ENV 1 ==> ERROR StageIn init script failed"\n'
424 <        # txt += '  echo "JOB_EXIT_STATUS = $exitStatus" \n'
425 <        # txt += '  echo "SanityCheckCode = $exitStatus" | tee -a $RUNTIME_AREA/$repo\n'
426 <        # txt += '  dumpStatus $RUNTIME_AREA/$repo\n'
427 <        # txt += '  exit $exitStatus\n'
428 <        # txt += 'fi\n'
429 <        # txt += "echo 'SET_EXE_ENV 0 ==> job setup ok'\n"
430 <        txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
431 <
432 <        # txt += 'echo "FirstEvent=$FirstEvent" >> .orcarc\n'
433 <        # txt += 'echo "MaxEvents=$MaxEvents" >> .orcarc\n'
434 <        # if self.ML:
435 <        #     txt += 'echo "MonalisaJobId=$NJob" >> .orcarc\n'
898 >            datasetpath_split = self.datasetPath.split("/")
899  
900 <        txt += '\n'
901 <        txt += 'echo "***** cat pset.cfg *********"\n'
902 <        txt += 'cat pset.cfg\n'
903 <        txt += 'echo "****** end pset.cfg ********"\n'
900 >            txt += 'PrimaryDataset='+datasetpath_split[1]+'\n'
901 >            txt += 'DataTier='+datasetpath_split[2]+'\n'
902 >            txt += 'ApplicationFamily=cmsRun\n'
903 >
904 >        else:
905 >            txt += 'DatasetPath=MCDataTier\n'
906 >            txt += 'PrimaryDataset=null\n'
907 >            txt += 'DataTier=null\n'
908 >            txt += 'ApplicationFamily=MCDataTier\n'
909 >        if self.pset != None:
910 >            pset = os.path.basename(job.configFilename())
911 >            txt += '\n'
912 >            txt += 'cp  $RUNTIME_AREA/'+pset+' .\n'
913 >            if (self.datasetPath): # standard job
914 >                txt += 'InputFiles=${args[1]}; export InputFiles\n'
915 >                if (self.useParent):  
916 >                    txt += 'ParentFiles=${args[2]}; export ParentFiles\n'
917 >                    txt += 'MaxEvents=${args[3]}; export MaxEvents\n'
918 >                    txt += 'SkipEvents=${args[4]}; export SkipEvents\n'
919 >                else:
920 >                    txt += 'MaxEvents=${args[2]}; export MaxEvents\n'
921 >                    txt += 'SkipEvents=${args[3]}; export SkipEvents\n'
922 >                txt += 'echo "Inputfiles:<$InputFiles>"\n'
923 >                if (self.useParent): txt += 'echo "ParentFiles:<$ParentFiles>"\n'
924 >                txt += 'echo "MaxEvents:<$MaxEvents>"\n'
925 >                txt += 'echo "SkipEvents:<$SkipEvents>"\n'
926 >            else:  # pythia like job
927 >                txt += 'PreserveSeeds='  + ','.join(self.preserveSeeds)  + '; export PreserveSeeds\n'
928 >                txt += 'IncrementSeeds=' + ','.join(self.incrementSeeds) + '; export IncrementSeeds\n'
929 >                txt += 'echo "PreserveSeeds: <$PreserveSeeds>"\n'
930 >                txt += 'echo "IncrementSeeds:<$IncrementSeeds>"\n'
931 >                if (self.firstRun):
932 >                    txt += 'FirstRun=${args[1]}; export FirstRun\n'
933 >                    txt += 'echo "FirstRun: <$FirstRun>"\n'
934 >
935 >            txt += 'mv -f ' + pset + ' ' + psetName + '\n'
936 >
937 >
938 >        if self.pset != None:
939 >            # FUTURE: Can simplify for 2_1_x and higher
940 >            txt += '\n'
941 >            if self.debug_wrapper==True:
942 >                txt += 'echo "***** cat ' + psetName + ' *********"\n'
943 >                txt += 'cat ' + psetName + '\n'
944 >                txt += 'echo "****** end ' + psetName + ' ********"\n'
945 >                txt += '\n'
946 >            txt += 'PSETHASH=`edmConfigHash < ' + psetName + '` \n'
947 >            txt += 'echo "PSETHASH = $PSETHASH" \n'
948 >            txt += '\n'
949          return txt
950  
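# A hedged sketch (outside the class) of the release test used above to pick the
# parameter-set file name: CMSSW 2_1_x and later read a Python configuration,
# older releases a .cfg file.
def pset_name(cmssw_major, cmssw_minor):
    if (cmssw_major >= 2 and cmssw_minor >= 1) or (cmssw_major >= 3):
        return 'pset.py'
    return 'pset.cfg'

assert pset_name(2, 1) == 'pset.py'
assert pset_name(1, 6) == 'pset.cfg'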
951 <    def modifySteeringCards(self, nj):
951 >    def wsUntarSoftware(self, nj=0):
952          """
953 <        modify the card provided by the user,
954 <        writing a new card into share dir
953 >        Put in the script the commands to untar the software tar-ball
954 >        shipped in the input sandbox.
955          """
956 <        
956 >
957 >        txt = '\n#Written by cms_cmssw::wsUntarSoftware\n'
958 >
959 >        if os.path.isfile(self.tgzNameWithPath):
960 >            txt += 'echo ">>> tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+' :" \n'
961 >            txt += 'tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
962 >            if  self.debug_wrapper:
963 >                txt += 'ls -Al \n'
964 >            txt += 'untar_status=$? \n'
965 >            txt += 'if [ $untar_status -ne 0 ]; then \n'
966 >            txt += '   echo "ERROR ==> Untarring .tgz file failed"\n'
967 >            txt += '   job_exit_code=$untar_status\n'
968 >            txt += '   func_exit\n'
969 >            txt += 'else \n'
970 >            txt += '   echo "Successful untar" \n'
971 >            txt += 'fi \n'
972 >            txt += '\n'
973 >            txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
974 >            txt += 'if [ -z "$PYTHONPATH" ]; then\n'
975 >            txt += '   export PYTHONPATH=$RUNTIME_AREA/\n'
976 >            txt += 'else\n'
977 >            txt += '   export PYTHONPATH=$RUNTIME_AREA/:${PYTHONPATH}\n'
978 >            txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
979 >            txt += 'fi\n'
980 >            txt += '\n'
981 >
982 >            pass
983 >
984 >        return txt
985 >
986 >    def wsBuildExe(self, nj=0):
987 >        """
988 >        Put in the script the commands to move the pre-built libraries,
989 >        modules and sources into the job working area.
990 >        """
991 >
992 >        txt = '\n#Written by cms_cmssw::wsBuildExe\n'
993 >        txt += 'echo ">>> moving CMSSW software directories in `pwd`" \n'
994 >
995 >        txt += 'rm -r lib/ module/ \n'
996 >        txt += 'mv $RUNTIME_AREA/lib/ . \n'
997 >        txt += 'mv $RUNTIME_AREA/module/ . \n'
998 >        if self.dataExist == True:
999 >            txt += 'rm -r src/ \n'
1000 >            txt += 'mv $RUNTIME_AREA/src/ . \n'
1001 >        if len(self.additional_inbox_files)>0:
1002 >            for file in self.additional_inbox_files:
1003 >                txt += 'mv $RUNTIME_AREA/'+os.path.basename(file)+' . \n'
1004 >        # txt += 'mv $RUNTIME_AREA/ProdCommon/ . \n'
1005 >        # txt += 'mv $RUNTIME_AREA/IMProv/ . \n'
1006 >
1007 >        txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
1008 >        txt += 'if [ -z "$PYTHONPATH" ]; then\n'
1009 >        txt += '   export PYTHONPATH=$RUNTIME_AREA/\n'
1010 >        txt += 'else\n'
1011 >        txt += '   export PYTHONPATH=$RUNTIME_AREA/:${PYTHONPATH}\n'
1012 >        txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
1013 >        txt += 'fi\n'
1014 >        txt += '\n'
1015 >
1016 >        return txt
1017 >
1018 >
1019      def executableName(self):
1020 <        return self.executable
1020 >        if self.scriptExe:
1021 >            return "sh "
1022 >        else:
1023 >            return self.executable
1024  
1025      def executableArgs(self):
1026 <        return "-p pset.cfg"
1026 >        # FUTURE: This function tests the CMSSW version. Can be simplified as we drop support for old versions
1027 >        if self.scriptExe:#CarlosDaniele
1028 >            return   self.scriptExe + " $NJob"
1029 >        else:
1030 >            ex_args = ""
1031 >            # FUTURE: This tests the CMSSW version. Can remove code as versions deprecated
1032 >            # Framework job report
1033 >            if (self.CMSSW_major >= 1 and self.CMSSW_minor >= 5) or (self.CMSSW_major >= 2):
1034 >                ex_args += " -j $RUNTIME_AREA/crab_fjr_$NJob.xml"
1035 >            # Type of config file
1036 >            if self.CMSSW_major >= 2 :
1037 >                ex_args += " -p pset.py"
1038 >            else:
1039 >                ex_args += " -p pset.cfg"
1040 >            return ex_args
1041  
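# A hedged sketch of the cmsRun argument strings assembled by executableArgs()
# for a few assumed release numbers; $RUNTIME_AREA and $NJob are expanded later
# by the job wrapper.
def executable_args(major, minor):
    args = ''
    if (major >= 1 and minor >= 5) or (major >= 2):
        args += ' -j $RUNTIME_AREA/crab_fjr_$NJob.xml'
    if major >= 2:
        args += ' -p pset.py'
    else:
        args += ' -p pset.cfg'
    return args

assert executable_args(1, 6) == ' -j $RUNTIME_AREA/crab_fjr_$NJob.xml -p pset.cfg'
assert executable_args(2, 0) == ' -j $RUNTIME_AREA/crab_fjr_$NJob.xml -p pset.py'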
1042      def inputSandbox(self, nj):
1043          """
1044          Returns a list of filenames to be put in JDL input sandbox.
1045          """
1046          inp_box = []
460        # dict added to delete duplicate from input sandbox file list
461        seen = {}
462        ## code
1047          if os.path.isfile(self.tgzNameWithPath):
1048              inp_box.append(self.tgzNameWithPath)
1049 <        ## config
1050 <        inp_box.append(common.job_list[nj].configFilename())
467 <        ## additional input files
468 <        for file in self.additional_inbox_files:
469 <            inp_box.append(common.work_space.cwdDir()+file)
470 <        #print "sono inputSandbox, inp_box = ", inp_box
1049 >        wrapper = os.path.basename(str(common._db.queryTask('scriptName')))
1050 >        inp_box.append(common.work_space.pathForTgz() +'job/'+ wrapper)
1051          return inp_box
1052  
1053      def outputSandbox(self, nj):
# Line 476 | Line 1056 | class Cmssw(JobType):
1056          """
1057          out_box = []
1058  
479        stdout=common.job_list[nj].stdout()
480        stderr=common.job_list[nj].stderr()
481
1059          ## User Declared output files
1060 <        for out in self.output_file:
1061 <            n_out = nj + 1
1062 <            out_box.append(self.numberFile_(out,str(n_out)))
1060 >        for out in (self.output_file+self.output_file_sandbox):
1061 >            n_out = nj + 1
1062 >            out_box.append(numberFile(out,str(n_out)))
1063          return out_box
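# A hedged sketch of the naming convention applied by the numberFile helper used
# above: the job number is inserted before the last extension of each output file.
def number_file(name, tag):
    parts = name.split('.')
    if len(parts) > 1:
        return '.'.join(parts[:-1]) + '_' + tag + '.' + parts[-1]
    return name + '_' + tag

assert number_file('out.root', '3') == 'out_3.root'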
487        return []
488
489    def prepareSteeringCards(self):
490        """
491        Make initial modifications of the user's steering card file.
492        """
493        infile = open(self.pset,'r')
494            
495        outfile = open(common.work_space.jobDir()+self.name()+'.cfg', 'w')
496          
497        outfile.write('\n\n##### The following cards have been created by CRAB: DO NOT TOUCH #####\n')
1064  
499        outfile.write('InputCollections=/System/'+self.owner+'/'+self.dataset+'/'+self.dataset+'\n')
500
501        infile.close()
502        outfile.close()
503        return
1065  
1066      def wsRenameOutput(self, nj):
1067          """
1068          Returns part of a job script which renames the produced files.
1069          """
1070  
1071 <        txt = '\n'
1072 <        file_list = ''
1073 <        for fileWithSuffix in self.output_file:
1074 <            output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
1075 <            file_list=file_list+output_file_num+','
1076 <            txt += '\n'
1077 <            txt += 'ls \n'
1071 >        txt = '\n#Written by cms_cmssw::wsRenameOutput\n'
1072 >        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
1073 >        txt += 'echo ">>> current directory content:"\n'
1074 >        if self.debug_wrapper:
1075 >            txt += 'ls -Al\n'
1076 >        txt += '\n'
1077 >
1078 >        for fileWithSuffix in (self.output_file):
1079 >            output_file_num = numberFile(fileWithSuffix, '$NJob')
1080              txt += '\n'
1081 <            txt += 'ls '+fileWithSuffix+'\n'
1082 <            txt += 'exe_result=$?\n'
1083 <            txt += 'if [ $exe_result -ne 0 ] ; then\n'
1084 <            txt += '   echo "ERROR: No output file to manage"\n'
1085 <            txt += '   echo "JOB_EXIT_STATUS = $exe_result"\n'
1086 <            txt += '   echo "SanityCheckCode = $exe_result" | tee -a $RUNTIME_AREA/$repo\n'
1087 <            txt += '   dumpStatus $RUNTIME_AREA/$repo\n'
1088 <            txt += '   exit $exe_result \n'
1081 >            txt += '# check output file\n'
1082 >            txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n'
1083 >            if (self.copy_data == 1):  # For OSG nodes, file is in $WORKING_DIR, should not be moved to $RUNTIME_AREA
1084 >                txt += '    mv '+fileWithSuffix+' '+output_file_num+'\n'
1085 >                txt += '    ln -s `pwd`/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
1086 >            else:
1087 >                txt += '    mv '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1088 >                txt += '    ln -s $RUNTIME_AREA/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
1089              txt += 'else\n'
1090 <            txt += '   cp '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1090 >            txt += '    job_exit_code=60302\n'
1091 >            txt += '    echo "WARNING: Output file '+fileWithSuffix+' not found"\n'
1092 >            if common.scheduler.name().upper() == 'CONDOR_G':
1093 >                txt += '    if [ $middleware == OSG ]; then \n'
1094 >                txt += '        echo "prepare dummy output file"\n'
1095 >                txt += '        echo "Processing of job output failed" > $RUNTIME_AREA/'+output_file_num+'\n'
1096 >                txt += '    fi \n'
1097              txt += 'fi\n'
1098 <            txt += 'cd $RUNTIME_AREA\n'
1099 <                      
1100 <            pass
1101 <      
1102 <        file_list=file_list[:-1]
1103 <        txt += 'file_list='+file_list+'\n'
1098 >        file_list = []
1099 >        for fileWithSuffix in (self.output_file):
1100 >             file_list.append(numberFile(fileWithSuffix, '$NJob'))
1101 >
1102 >        txt += 'file_list="'+string.join(file_list,' ')+'"\n'
1103 >        txt += '\n'
1104 >        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
1105 >        txt += 'echo ">>> current directory content:"\n'
1106 >        if self.debug_wrapper:
1107 >            txt += 'ls -Al\n'
1108 >        txt += '\n'
1109 >        txt += 'cd $RUNTIME_AREA\n'
1110 >        txt += 'echo ">>> current directory (RUNTIME_AREA):  $RUNTIME_AREA"\n'
1111          return txt
1112  
1113 <    def numberFile_(self, file, txt):
1113 >    def getRequirements(self, nj=[]):
1114          """
1115 <        append _'txt' before last extension of a file
1115 >        return job requirements to add to jdl files
1116          """
1117 <        p = string.split(file,".")
1118 <        # take away last extension
1119 <        name = p[0]
1120 <        for x in p[1:-1]:
1121 <           name=name+"."+x
1122 <        # add "_txt"
1123 <        if len(p)>1:
1124 <          ext = p[len(p)-1]
1125 <          #result = name + '_' + str(txt) + "." + ext
1126 <          result = name + '_' + txt + "." + ext
1117 >        req = ''
1118 >        if self.version:
1119 >            req='Member("VO-cms-' + \
1120 >                 self.version + \
1121 >                 '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1122 >        if self.executable_arch:
1123 >            req+=' && Member("VO-cms-' + \
1124 >                 self.executable_arch + \
1125 >                 '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1126 >
1127 >        req = req + ' && (other.GlueHostNetworkAdapterOutboundIP)'
1128 >        if common.scheduler.name() == "glitecoll":
1129 >            req += ' && other.GlueCEStateStatus == "Production" '
1130 >
1131 >        return req
1132 >
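# A hedged illustration (version and architecture values are assumptions) of the
# GLUE requirements expression assembled by getRequirements() for the JDL:
version, arch = 'CMSSW_2_0_8', 'slc4_ia32_gcc345'
req  = 'Member("VO-cms-%s", other.GlueHostApplicationSoftwareRunTimeEnvironment)' % version
req += ' && Member("VO-cms-%s", other.GlueHostApplicationSoftwareRunTimeEnvironment)' % arch
req += ' && (other.GlueHostNetworkAdapterOutboundIP)'
print req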
1133 >    def configFilename(self):
1134 >        """ return the config filename """
1135 >        # FUTURE: Can remove cfg mode for CMSSW >= 2_1_x
1136 >        if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
1137 >          return self.name()+'.py'
1138          else:
1139 <          #result = name + '_' + str(txt)
553 <          result = name + '_' + txt
554 <        
555 <        return result
1139 >          return self.name()+'.cfg'
1140  
1141 <    def getRequirements(self):
1141 >    def wsSetupCMSOSGEnvironment_(self):
1142          """
1143 <        return job requirements to add to jdl files
1143 >        Returns part of a job script which prepares
1144 >        the execution environment and which is common for all CMS jobs.
1145          """
1146 <        req = ''
1147 <        if common.analisys_common_info['sites']:
1148 <            if common.analisys_common_info['sw_version']:
1149 <                req='Member("VO-cms-' + \
1150 <                     common.analisys_common_info['sw_version'] + \
1151 <                     '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1152 <            if len(common.analisys_common_info['sites'])>0:
1153 <                req = req + ' && ('
1154 <                for i in range(len(common.analisys_common_info['sites'])):
1155 <                    req = req + 'other.GlueCEInfoHostName == "' \
1156 <                         + common.analisys_common_info['sites'][i] + '"'
1157 <                    if ( i < (int(len(common.analisys_common_info['sites']) - 1)) ):
1158 <                        req = req + ' || '
1159 <            req = req + ')'
1160 <        #print "req = ", req
1161 <        return req
1146 >        txt = '\n#Written by cms_cmssw::wsSetupCMSOSGEnvironment_\n'
1147 >        txt += '    echo ">>> setup CMS OSG environment:"\n'
1148 >        txt += '    echo "set SCRAM ARCH to ' + self.executable_arch + '"\n'
1149 >        txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
1150 >        txt += '    echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
1151 >        txt += '    if [ -f $OSG_APP/cmssoft/cms/cmsset_default.sh ] ;then\n'
1152 >        txt += '      # Use $OSG_APP/cmssoft/cms/cmsset_default.sh to setup cms software\n'
1153 >        txt += '        source $OSG_APP/cmssoft/cms/cmsset_default.sh '+self.version+'\n'
1154 >        txt += '    else\n'
1155 >        txt += '        echo "ERROR ==> $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1156 >        txt += '        job_exit_code=10020\n'
1157 >        txt += '        func_exit\n'
1158 >        txt += '    fi\n'
1159 >        txt += '\n'
1160 >        txt += '    echo "==> setup cms environment ok"\n'
1161 >        txt += '    echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
1162 >
1163 >        return txt
1164 >
1165 >    def wsSetupCMSLCGEnvironment_(self):
1166 >        """
1167 >        Returns part of a job script which prepares
1168 >        the execution environment and which is common for all CMS jobs.
1169 >        """
1170 >        txt = '\n#Written by cms_cmssw::wsSetupCMSLCGEnvironment_\n'
1171 >        txt += '    echo ">>> setup CMS LCG environment:"\n'
1172 >        txt += '    echo "set SCRAM ARCH and BUILD_ARCH to ' + self.executable_arch + ' ###"\n'
1173 >        txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
1174 >        txt += '    export BUILD_ARCH='+self.executable_arch+'\n'
1175 >        txt += '    if [ ! $VO_CMS_SW_DIR ] ;then\n'
1176 >        txt += '        echo "ERROR ==> CMS software dir not found on WN `hostname`"\n'
1177 >        txt += '        job_exit_code=10031\n'
1178 >        txt += '        func_exit\n'
1179 >        txt += '    else\n'
1180 >        txt += '        echo "Sourcing environment... "\n'
1181 >        txt += '        if [ ! -s $VO_CMS_SW_DIR/cmsset_default.sh ] ;then\n'
1182 >        txt += '            echo "ERROR ==> cmsset_default.sh file not found into dir $VO_CMS_SW_DIR"\n'
1183 >        txt += '            job_exit_code=10020\n'
1184 >        txt += '            func_exit\n'
1185 >        txt += '        fi\n'
1186 >        txt += '        echo "sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1187 >        txt += '        source $VO_CMS_SW_DIR/cmsset_default.sh\n'
1188 >        txt += '        result=$?\n'
1189 >        txt += '        if [ $result -ne 0 ]; then\n'
1190 >        txt += '            echo "ERROR ==> problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1191 >        txt += '            job_exit_code=10032\n'
1192 >        txt += '            func_exit\n'
1193 >        txt += '        fi\n'
1194 >        txt += '    fi\n'
1195 >        txt += '    \n'
1196 >        txt += '    echo "==> setup cms environment ok"\n'
1197 >        return txt
1198 >
1199 >    def modifyReport(self, nj):
1200 >        """
1201 >        insert the part of the script that modifies the FrameworkJob Report
1202 >        """
1203 >        txt = '\n#Written by cms_cmssw::modifyReport\n'
1204 >        publish_data = int(self.cfg_params.get('USER.publish_data',0))
1205 >        if (publish_data == 1):
1206 >            processedDataset = self.cfg_params['USER.publish_data_name']
1207 >            LFNBaseName = LFNBase(processedDataset)
1208 >
1209 >            txt += 'if [ $copy_exit_status -eq 0 ]; then\n'
1210 >            txt += '    FOR_LFN=%s_${PSETHASH}/\n'%(LFNBaseName)
1211 >            txt += 'else\n'
1212 >            txt += '    FOR_LFN=/copy_problems/ \n'
1213 >            txt += '    SE=""\n'
1214 >            txt += '    SE_PATH=""\n'
1215 >            txt += 'fi\n'
1216 >
1217 >            txt += 'echo ">>> Modify Job Report:" \n'
1218 >            txt += 'chmod a+x $RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py\n'
1219 >            txt += 'ProcessedDataset='+processedDataset+'\n'
1220 >            txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1221 >            txt += 'echo "SE = $SE"\n'
1222 >            txt += 'echo "SE_PATH = $SE_PATH"\n'
1223 >            txt += 'echo "FOR_LFN = $FOR_LFN" \n'
1224 >            txt += 'echo "CMSSW_VERSION = $CMSSW_VERSION"\n\n'
1225 >            txt += 'echo "$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
1226 >            txt += '$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
1227 >            txt += 'modifyReport_result=$?\n'
1228 >            txt += 'if [ $modifyReport_result -ne 0 ]; then\n'
1229 >            txt += '    modifyReport_result=70500\n'
1230 >            txt += '    job_exit_code=$modifyReport_result\n'
1231 >            txt += '    echo "ModifyReportResult=$modifyReport_result" | tee -a $RUNTIME_AREA/$repo\n'
1232 >            txt += '    echo "WARNING: Problem with ModifyJobReport"\n'
1233 >            txt += 'else\n'
1234 >            txt += '    mv NewFrameworkJobReport.xml $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
1235 >            txt += 'fi\n'
1236 >        return txt
1237 >
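# A hedged sketch of the [USER] configuration keys this branch reads; the values
# shown are hypothetical, not CRAB defaults.
cfg_params = {
    'USER.publish_data': '1',                        # enables the FJR modification step
    'USER.publish_data_name': 'MyProcessedDataset',  # hypothetical processed dataset name
}
publish_data = int(cfg_params.get('USER.publish_data', 0))
assert publish_data == 1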
1238 >    def wsParseFJR(self):
1239 >        """
1240 >        Parse the FrameworkJobReport to obtain useful info
1241 >        """
1242 >        txt = '\n#Written by cms_cmssw::wsParseFJR\n'
1243 >        txt += 'echo ">>> Parse FrameworkJobReport crab_fjr.xml"\n'
1244 >        txt += 'if [ -s $RUNTIME_AREA/crab_fjr_$NJob.xml ]; then\n'
1245 >        txt += '    if [ -s $RUNTIME_AREA/parseCrabFjr.py ]; then\n'
1246 >        txt += '        cmd_out=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --dashboard $MonitorID,$MonitorJobID '+self.debugWrap+'`\n'
1247 >        if self.debug_wrapper :
1248 >            txt += '        echo "Result of parsing the FrameworkJobReport crab_fjr.xml: $cmd_out"\n'
1249 >        txt += '        executable_exit_status=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --exitcode`\n'
1250 >        txt += '        if [ $executable_exit_status -eq 50115 ];then\n'
1251 >        txt += '            echo ">>> crab_fjr.xml contents: "\n'
1252 >        txt += '            cat $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
1253 >        txt += '            echo "Wrong FrameworkJobReport --> does not contain useful info. ExitStatus: $executable_exit_status"\n'
1254 >        txt += '        elif [ $executable_exit_status -eq -999 ];then\n'
1255 >        txt += '            echo "ExitStatus from FrameworkJobReport not available. Using exit code of executable from command line."\n'
1256 >        txt += '        else\n'
1257 >        txt += '            echo "Extracted ExitStatus from FrameworkJobReport parsing output: $executable_exit_status"\n'
1258 >        txt += '        fi\n'
1259 >        txt += '    else\n'
1260 >        txt += '        echo "CRAB python script to parse CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1261 >        txt += '    fi\n'
1262 >          #### Patch to check input data reading for CMSSW16x. Hopefully we'll remove it asap
1263 >
1264 >        if (self.datasetPath and self.dataset_pu == 'NONE'):
1265 >          # VERIFY PROCESSED DATA
1266 >            txt += '    if [ $executable_exit_status -eq 0 ];then\n'
1267 >            txt += '      echo ">>> Verify list of processed files:"\n'
1268 >            txt += '      echo $InputFiles |tr -d \'\\\\\' |tr \',\' \'\\n\'|tr -d \'"\' > input-files.txt\n'
1269 >            txt += '      python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --lfn > processed-files.txt\n'
1270 >            txt += '      cat input-files.txt  | sort | uniq > tmp.txt\n'
1271 >            txt += '      mv tmp.txt input-files.txt\n'
1272 >            txt += '      echo "cat input-files.txt"\n'
1273 >            txt += '      echo "----------------------"\n'
1274 >            txt += '      cat input-files.txt\n'
1275 >            txt += '      cat processed-files.txt | sort | uniq > tmp.txt\n'
1276 >            txt += '      mv tmp.txt processed-files.txt\n'
1277 >            txt += '      echo "----------------------"\n'
1278 >            txt += '      echo "cat processed-files.txt"\n'
1279 >            txt += '      echo "----------------------"\n'
1280 >            txt += '      cat processed-files.txt\n'
1281 >            txt += '      echo "----------------------"\n'
1282 >            txt += '      diff -q input-files.txt processed-files.txt\n'
1283 >            txt += '      fileverify_status=$?\n'
1284 >            txt += '      if [ $fileverify_status -ne 0 ]; then\n'
1285 >            txt += '         executable_exit_status=30001\n'
1286 >            txt += '         echo "ERROR ==> not all input files processed"\n'
1287 >            txt += '         echo "      ==> list of processed files from crab_fjr.xml differs from list in pset.cfg"\n'
1288 >            txt += '         echo "      ==> diff input-files.txt processed-files.txt"\n'
1289 >            txt += '      fi\n'
1290 >            txt += '    fi\n'
1291 >            txt += '\n'
1292 >        txt += 'else\n'
1293 >        txt += '    echo "CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1294 >        txt += 'fi\n'
1295 >        txt += '\n'
1296 >        txt += 'echo "ExeExitCode=$executable_exit_status" | tee -a $RUNTIME_AREA/$repo\n'
1297 >        txt += 'echo "EXECUTABLE_EXIT_STATUS = $executable_exit_status"\n'
1298 >        txt += 'job_exit_code=$executable_exit_status\n'
1299 >
1300 >        return txt
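# A minimal Python sketch of the "all input files processed" comparison that the
# generated shell fragment performs with sort/uniq/diff; the file names are assumptions.
input_files     = sorted(set(['/store/a.root', '/store/b.root']))
processed_files = sorted(set(['/store/a.root']))
if input_files != processed_files:
    executable_exit_status = 30001   # same exit code the wrapper assigns on mismatch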
1301 >
1302 >    def setParam_(self, param, value):
1303 >        self._params[param] = value
1304 >
1305 >    def getParams(self):
1306 >        return self._params
1307 >
1308 >    def uniquelist(self, old):
1309 >        """
1310 >        remove duplicates from a list
1311 >        """
1312 >        nd={}
1313 >        for e in old:
1314 >            nd[e]=0
1315 >        return nd.keys()
1316 >
1317 >    def outList(self):
1318 >        """
1319 >        build the list of expected output files and export it for the output check
1320 >        """
1321 >        txt = ''
1322 >        txt += 'echo ">>> list of expected files on output sandbox"\n'
1323 >        listOutFiles = []
1324 >        stdout = 'CMSSW_$NJob.stdout'
1325 >        stderr = 'CMSSW_$NJob.stderr'
1326 >        if (self.return_data == 1):
1327 >            for file in (self.output_file+self.output_file_sandbox):
1328 >                listOutFiles.append(numberFile(file, '$NJob'))
1329 >            listOutFiles.append(stdout)
1330 >            listOutFiles.append(stderr)
1331 >        else:
1332 >            for file in (self.output_file_sandbox):
1333 >                listOutFiles.append(numberFile(file, '$NJob'))
1334 >            listOutFiles.append(stdout)
1335 >            listOutFiles.append(stderr)
1336 >        txt += 'echo "output files: '+string.join(listOutFiles,' ')+'"\n'
1337 >        txt += 'filesToCheck="'+string.join(listOutFiles,' ')+'"\n'
1338 >        txt += 'export filesToCheck\n'
1339 >        return txt
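# A hedged illustration (assuming output_file = ['out.root'] and return_data = 1)
# of the wrapper lines outList() emits; $NJob is substituted at run time.
files = ['out_$NJob.root', 'CMSSW_$NJob.stdout', 'CMSSW_$NJob.stderr']
print 'echo "output files: ' + ' '.join(files) + '"'
print 'filesToCheck="' + ' '.join(files) + '"'
print 'export filesToCheck'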
