ViewVC Help
View File | Revision Log | Show Annotations | Root Listing
root/cvsroot/COMP/CRAB/python/cms_cmssw.py
(Generate patch)

Comparing COMP/CRAB/python/cms_cmssw.py (file contents):
Revision 1.221 by fanzago, Wed Jun 18 14:02:42 2008 UTC vs.
Revision 1.365 by spiga, Tue Nov 9 21:10:07 2010 UTC

# Line 1 | Line 1
1 +
2 + __revision__ = "$Id$"
3 + __version__ = "$Revision$"
4 +
5   from JobType import JobType
2 from crab_logger import Logger
6   from crab_exceptions import *
7   from crab_util import *
5 from BlackWhiteListParser import BlackWhiteListParser
8   import common
9 + import re
10   import Scram
11 < from LFNBaseName import *
11 > from Splitter import JobSplitter
12 > from Downloader import Downloader
13 > try:
14 >    import json
15 > except:
16 >    import simplejson as json
17  
18 + from IMProv.IMProvNode import IMProvNode
19 + from IMProv.IMProvLoader import loadIMProvFile
20   import os, string, glob
21 + from xml.dom import pulldom
22  
23   class Cmssw(JobType):
24      def __init__(self, cfg_params, ncjobs,skip_blocks, isNew):
25          JobType.__init__(self, 'CMSSW')
26 <        common.logger.debug(3,'CMSSW::__init__')
26 >        common.logger.debug('CMSSW::__init__')
27          self.skip_blocks = skip_blocks
28 <
29 <        self.argsList = []
19 <
28 >        self.argsList = 2
29 >        self.NumEvents=0
30          self._params = {}
31          self.cfg_params = cfg_params
32 <        # init BlackWhiteListParser
33 <        self.blackWhiteListParser = BlackWhiteListParser(cfg_params)
32 >        ### FEDE FOR MULTI ###
33 >        self.var_filter=''
34  
35 <        self.MaxTarBallSize = float(self.cfg_params.get('EDG.maxtarballsize',9.5))
35 >        ### Temporary patch to automatically skip the ISB size check:
36 >        self.server = self.cfg_params.get('CRAB.server_name',None) or \
37 >                      self.cfg_params.get('CRAB.use_server',0)
38 >        self.local  = common.scheduler.name().upper() in ['LSF','CAF','CONDOR','SGE','PBS']
39 >        size = 9.5
40 >        if self.server :
41 >            size = 1000
42 >        elif self.local:
43 >            size = 9999999
44 >        self.MaxTarBallSize = float(self.cfg_params.get('GRID.maxtarballsize',size))
45  
46          # number of jobs requested to be created, limit obj splitting
47          self.ncjobs = ncjobs
48  
30        log = common.logger
31
49          self.scram = Scram.Scram(cfg_params)
50          self.additional_inbox_files = []
51          self.scriptExe = ''
# Line 39 | Line 56 | class Cmssw(JobType):
56          self.pset = ''
57          self.datasetPath = ''
58  
59 +        self.tgzNameWithPath = common.work_space.pathForTgz()+self.tgz_name
60          # set FJR file name
61          self.fjrFileName = 'crab_fjr.xml'
62  
63          self.version = self.scram.getSWVersion()
64 +        common.logger.log(10-1,"CMSSW version is: "+str(self.version))
65          version_array = self.version.split('_')
66          self.CMSSW_major = 0
67          self.CMSSW_minor = 0
# Line 55 | Line 74 | class Cmssw(JobType):
74              msg = "Cannot parse CMSSW version string: " + self.version + " for major and minor release number!"
75              raise CrabException(msg)
76  
77 +        if self.CMSSW_major < 2 or (self.CMSSW_major == 2 and self.CMSSW_minor < 1):
78 +            msg = "CRAB supports CMSSW >= 2_1_x only. Use an older CRAB version."
79 +            raise CrabException(msg)
80 +            """
81 +            As CMSSW versions are dropped we can drop more code:
82 +            2.x dropped: drop check for lumi range setting
83 +            """
84 +        self.checkCMSSWVersion()
85          ### collect Data cards
86  
60        if not cfg_params.has_key('CMSSW.datasetpath'):
61            msg = "Error: datasetpath not defined "
62            raise CrabException(msg)
63        
87          ### Temporary: added to remove input file control in the case of PU
88 <        if not cfg_params.has_key('USER.dataset_pu'):
89 <            self.dataset_pu = 'NONE'
67 <        else:
68 <            self.dataset_pu = cfg_params['USER.dataset_pu']
69 <        ####    
70 <        
88 >        self.dataset_pu = cfg_params.get('CMSSW.dataset_pu', None)
89 >
90          tmp =  cfg_params['CMSSW.datasetpath']
91 <        log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
92 <        if string.lower(tmp)=='none':
91 >        common.logger.log(10-1, "CMSSW::CMSSW(): datasetPath = "+tmp)
92 >
93 >        if tmp =='':
94 >            msg = "Error: datasetpath not defined "
95 >            raise CrabException(msg)
96 >        elif string.lower(tmp)=='none':
97              self.datasetPath = None
98              self.selectNoInput = 1
99 +            self.primaryDataset = 'null'
100          else:
101              self.datasetPath = tmp
102              self.selectNoInput = 0
103 +            ll = len(self.datasetPath.split("/"))
104 +            if (ll < 4):
105 +                msg = 'Your datasetpath has a invalid format ' + self.datasetPath + '\n'
106 +                msg += 'Expected a path in format /PRIMARY/PROCESSED/TIER1-TIER2 or /PRIMARY/PROCESSED/TIER/METHOD for ADS'
107 +                raise CrabException(msg)
108 +            self.primaryDataset = self.datasetPath.split("/")[1]
109 +            self.dataTier = self.datasetPath.split("/")[2]
110 +
111 +        # Analysis dataset is primary/processed/tier/definition
112 +        self.ads = False
113 +        if self.datasetPath:
114 +            self.ads = len(self.datasetPath.split("/")) > 4
115 +        self.lumiMask = self.cfg_params.get('CMSSW.lumi_mask',None)
116 +        self.lumiParams = self.cfg_params.get('CMSSW.total_number_of_lumis',None) or \
117 +                          self.cfg_params.get('CMSSW.lumis_per_job',None)
118 +
119 +        # FUTURE: Can remove this check
120 +        if self.ads and self.CMSSW_major < 3:
121 +            common.logger.info('Warning: Analysis dataset support is incomplete in CMSSW 2_x.')
122 +            common.logger.info('  Only file level, not lumi level, granularity is supported.')
123 +
124 +        self.debugWrap=''
125 +        self.debug_wrapper = int(cfg_params.get('USER.debug_wrapper',0))
126 +        if self.debug_wrapper == 1: self.debugWrap='--debug'
127  
80        self.dataTiers = []
81        self.debugWrap = ''
82        self.debug_wrapper = cfg_params.get('USER.debug_wrapper',False)
83        if self.debug_wrapper: self.debugWrap='--debug'
128          ## now the application
129 +        self.managedGenerators = ['madgraph', 'comphep', 'lhe']
130 +        self.generator = cfg_params.get('CMSSW.generator','pythia').lower()
131          self.executable = cfg_params.get('CMSSW.executable','cmsRun')
132 <        log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
132 >        common.logger.log(10-1, "CMSSW::CMSSW(): executable = "+self.executable)
133  
134          if not cfg_params.has_key('CMSSW.pset'):
135              raise CrabException("PSet file missing. Cannot run cmsRun ")
136          self.pset = cfg_params['CMSSW.pset']
137 <        log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
137 >        common.logger.log(10-1, "Cmssw::Cmssw(): PSet file = "+self.pset)
138          if self.pset.lower() != 'none' :
139              if (not os.path.exists(self.pset)):
140                  raise CrabException("User defined PSet file "+self.pset+" does not exist")
# Line 109 | Line 155 | class Cmssw(JobType):
155          if tmp :
156              self.output_file = [x.strip() for x in tmp.split(',')]
157              outfileflag = True #output found
112        #else:
113        #    log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
158  
115        # script_exe file as additional file in inputSandbox
159          self.scriptExe = cfg_params.get('USER.script_exe',None)
160          if self.scriptExe :
161              if not os.path.isfile(self.scriptExe):
# Line 120 | Line 163 | class Cmssw(JobType):
163                  raise CrabException(msg)
164              self.additional_inbox_files.append(string.strip(self.scriptExe))
165  
166 +        self.AdditionalArgs = cfg_params.get('USER.script_arguments',None)
167 +        if self.AdditionalArgs : self.AdditionalArgs = string.replace(self.AdditionalArgs,',',' ')
168 +
169          if self.datasetPath == None and self.pset == None and self.scriptExe == '' :
170              msg ="Error. script_exe  not defined"
171              raise CrabException(msg)
172  
173 <        # use parent files...
174 <        self.useParent = self.cfg_params.get('CMSSW.use_parent',False)
173 >        # use parent files...
174 >        self.useParent = int(self.cfg_params.get('CMSSW.use_parent',0))
175  
176          ## additional input files
177          if cfg_params.has_key('USER.additional_input_files'):
# Line 148 | Line 194 | class Cmssw(JobType):
194                      self.additional_inbox_files.append(string.strip(file))
195                  pass
196              pass
197 <            common.logger.debug(5,"Additional input files: "+str(self.additional_inbox_files))
197 >            common.logger.debug("Additional input files: "+str(self.additional_inbox_files))
198          pass
199  
154        ## Events per job
155        if cfg_params.has_key('CMSSW.events_per_job'):
156            self.eventsPerJob =int( cfg_params['CMSSW.events_per_job'])
157            self.selectEventsPerJob = 1
158        else:
159            self.eventsPerJob = -1
160            self.selectEventsPerJob = 0
161
162        ## number of jobs
163        if cfg_params.has_key('CMSSW.number_of_jobs'):
164            self.theNumberOfJobs =int( cfg_params['CMSSW.number_of_jobs'])
165            self.selectNumberOfJobs = 1
166        else:
167            self.theNumberOfJobs = 0
168            self.selectNumberOfJobs = 0
169
170        if cfg_params.has_key('CMSSW.total_number_of_events'):
171            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
172            self.selectTotalNumberEvents = 1
173            if self.selectNumberOfJobs  == 1:
174                if (self.total_number_of_events != -1) and int(self.total_number_of_events) < int(self.theNumberOfJobs):
175                    msg = 'Must specify at least one event per job. total_number_of_events > number_of_jobs '
176                    raise CrabException(msg)
177        else:
178            self.total_number_of_events = 0
179            self.selectTotalNumberEvents = 0
180
181        if self.pset != None:
182             if ( (self.selectTotalNumberEvents + self.selectEventsPerJob + self.selectNumberOfJobs) != 2 ):
183                 msg = 'Must define exactly two of total_number_of_events, events_per_job, or number_of_jobs.'
184                 raise CrabException(msg)
185        else:
186             if (self.selectNumberOfJobs == 0):
187                 msg = 'Must specify  number_of_jobs.'
188                 raise CrabException(msg)
200  
201          ## New method of dealing with seeds
202          self.incrementSeeds = []
# Line 201 | Line 212 | class Cmssw(JobType):
212                  tmp.strip()
213                  self.incrementSeeds.append(tmp)
214  
215 <        ## Old method of dealing with seeds
205 <        ## FUTURE: This is for old CMSSW and old CRAB. Can throw exceptions after a couple of CRAB releases and then
206 <        ## remove
207 <        self.sourceSeed = cfg_params.get('CMSSW.pythia_seed',None)
208 <        if self.sourceSeed:
209 <            print "pythia_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
210 <            self.incrementSeeds.append('sourceSeed')
211 <            self.incrementSeeds.append('theSource')
212 <
213 <        self.sourceSeedVtx = cfg_params.get('CMSSW.vtx_seed',None)
214 <        if self.sourceSeedVtx:
215 <            print "vtx_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
216 <            self.incrementSeeds.append('VtxSmeared')
217 <
218 <        self.sourceSeedG4 = cfg_params.get('CMSSW.g4_seed',None)
219 <        if self.sourceSeedG4:
220 <            print "g4_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
221 <            self.incrementSeeds.append('g4SimHits')
222 <
223 <        self.sourceSeedMix = cfg_params.get('CMSSW.mix_seed',None)
224 <        if self.sourceSeedMix:
225 <            print "mix_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
226 <            self.incrementSeeds.append('mix')
227 <
228 <        self.firstRun = cfg_params.get('CMSSW.first_run',None)
229 <
230 <
231 <        # Copy/return
215 >        # Copy/return/publish
216          self.copy_data = int(cfg_params.get('USER.copy_data',0))
217          self.return_data = int(cfg_params.get('USER.return_data',0))
218 +        self.publish_data = int(cfg_params.get('USER.publish_data',0))
219 +        if (self.publish_data == 1):
220 +            if not cfg_params.has_key('USER.publish_data_name'):
221 +                raise CrabException('Cannot publish output data, because you did not specify USER.publish_data_name parameter in the crab.cfg file')
222 +            else:
223 +                self.processedDataset = cfg_params['USER.publish_data_name']
224  
225 +        self.conf = {}
226 +        self.conf['pubdata'] = None
227 +        # number of jobs requested to be created, limit obj splitting DD
228          #DBSDLS-start
229          ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
230          self.maxEvents=0  # max events available   ( --> check the requested nb. of evts in Creator.py)
# Line 240 | Line 233 | class Cmssw(JobType):
233          ## Perform the data location and discovery (based on DBS/DLS)
234          ## SL: Don't if NONE is specified as input (pythia use case)
235          blockSites = {}
236 <        if self.datasetPath:
237 <            blockSites = self.DataDiscoveryAndLocation(cfg_params)
238 <        #DBSDLS-end
236 > #wmbs
237 >        self.automation = int(self.cfg_params.get('WMBS.automation',0))
238 >        if self.automation == 0:
239 >            if self.datasetPath:
240 >                blockSites = self.DataDiscoveryAndLocation(cfg_params)
241 >            #DBSDLS-end
242 >            self.conf['blockSites']=blockSites
243 >
244 >            ## Select Splitting
245 >            splitByRun = int(cfg_params.get('CMSSW.split_by_run',0))
246 >
247 >            if self.selectNoInput:
248 >                if self.pset == None:
249 >                    self.algo = 'ForScript'
250 >                else:
251 >                    self.algo = 'NoInput'
252 >                    self.conf['managedGenerators']=self.managedGenerators
253 >                    self.conf['generator']=self.generator
254 >            elif self.ads or self.lumiMask or self.lumiParams:
255 >                self.algo = 'LumiBased'
256 >                if splitByRun:
257 >                    msg = "Cannot combine split by run with lumi_mask, ADS, " \
258 >                          "or lumis_per_job. Use split by lumi mode instead."
259 >                    raise CrabException(msg)
260  
261 <        ## Select Splitting
262 <        if self.selectNoInput:
249 <            if self.pset == None:
250 <                self.jobSplittingForScript()
261 >            elif splitByRun ==1:
262 >                self.algo = 'RunBased'
263              else:
264 <                self.jobSplittingNoInput()
265 <        else:
254 <            self.jobSplittingByBlocks(blockSites)
264 >                self.algo = 'EventBased'
265 >            common.logger.debug("Job splitting method: %s" % self.algo)
266  
267 +            splitter = JobSplitter(self.cfg_params,self.conf)
268 +            self.dict = splitter.Algos()[self.algo]()
269 +
270 +        self.argsFile= '%s/arguments.xml'%common.work_space.shareDir()
271 +        self.rootArgsFilename= 'arguments'
272          # modify Pset only the first time
273          if isNew:
274 <            if self.pset != None:
275 <                import PsetManipulator as pp
276 <                PsetEdit = pp.PsetManipulator(self.pset)
277 <                try:
278 <                    # Add FrameworkJobReport to parameter-set, set max events.
279 <                    # Reset later for data jobs by writeCFG which does all modifications
280 <                    PsetEdit.addCrabFJR(self.fjrFileName) # FUTURE: Job report addition not needed by CMSSW>1.5
281 <                    PsetEdit.maxEvent(self.eventsPerJob)
282 <                    PsetEdit.psetWriter(self.configFilename())
283 <                    ## If present, add TFileService to output files
284 <                    if not int(cfg_params.get('CMSSW.skip_TFileService_output',0)):
285 <                        tfsOutput = PsetEdit.getTFileService()
286 <                        if tfsOutput:
287 <                            if tfsOutput in self.output_file:
288 <                                common.logger.debug(5,"Output from TFileService "+tfsOutput+" already in output files")
289 <                            else:
290 <                                outfileflag = True #output found
291 <                                self.output_file.append(tfsOutput)
292 <                                common.logger.message("Adding "+tfsOutput+" to output files (from TFileService)")
293 <                            pass
294 <                        pass
295 <                    ## If present and requested, add PoolOutputModule to output files
296 <                    if int(cfg_params.get('CMSSW.get_edm_output',0)):
297 <                        edmOutput = PsetEdit.getPoolOutputModule()
298 <                        if edmOutput:
299 <                            if edmOutput in self.output_file:
300 <                                common.logger.debug(5,"Output from PoolOutputModule "+edmOutput+" already in output files")
301 <                            else:
302 <                                self.output_file.append(edmOutput)
303 <                                common.logger.message("Adding "+edmOutput+" to output files (from PoolOutputModule)")
304 <                            pass
305 <                        pass
306 <                except CrabException:
307 <                    msg='Error while manipulating ParameterSet: exiting...'
308 <                    raise CrabException(msg)
309 <            ## Prepare inputSandbox TarBall (only the first time)  
310 <            self.tgzNameWithPath = self.getTarBall(self.executable)
274 >            if self.pset != None: self.ModifyPset()
275 >
276 >            ## Prepare inputSandbox TarBall (only the first time)
277 >            self.tarNameWithPath = self.getTarBall(self.executable)
278 >
279 >
280 >    def ModifyPset(self):
281 >        import PsetManipulator as pp
282 >
283 >        # If pycfg_params set, fake out the config script
284 >        # to make it think it was called with those args
285 >        pycfg_params = self.cfg_params.get('CMSSW.pycfg_params',None)
286 >        if pycfg_params:
287 >            trueArgv = sys.argv
288 >            sys.argv = [self.pset]
289 >            sys.argv.extend(pycfg_params.split(' '))
290 >        PsetEdit = pp.PsetManipulator(self.pset)
291 >        if pycfg_params: # Restore original sys.argv
292 >            sys.argv = trueArgv
293 >
294 >        try:
295 >            # Add FrameworkJobReport to parameter-set, set max events.
296 >            # Reset later for data jobs by writeCFG which does all modifications
297 >            PsetEdit.maxEvent(1)
298 >            PsetEdit.skipEvent(0)
299 >            PsetEdit.psetWriter(self.configFilename())
300 >            ## If present, add TFileService to output files
301 >            if not int(self.cfg_params.get('CMSSW.skip_tfileservice_output',0)):
302 >                tfsOutput = PsetEdit.getTFileService()
303 >                if tfsOutput:
304 >                    if tfsOutput in self.output_file:
305 >                        common.logger.debug("Output from TFileService "+tfsOutput+" already in output files")
306 >                    else:
307 >                        outfileflag = True #output found
308 >                        self.output_file.append(tfsOutput)
309 >                        common.logger.info("Adding "+tfsOutput+" (from TFileService) to list of output files")
310 >                    pass
311 >                pass
312 >
313 >            # If requested, add PoolOutputModule to output files
314 >            ### FEDE FOR MULTI ###
315 >            #edmOutput = PsetEdit.getPoolOutputModule()
316 >            edmOutputDict = PsetEdit.getPoolOutputModule()
317 >            common.logger.debug("(test) edmOutputDict = "+str(edmOutputDict))
318 >            filter_dict = {}
319 >            for key in edmOutputDict.keys():
320 >                filter_dict[key]=edmOutputDict[key]['dataset']
321 >            common.logger.debug("(test) filter_dict for multi =  "+str(filter_dict))
322 >
323 >            #### in CMSSW.sh: export var_filter
324 >
325 >            self.var_filter = json.dumps(filter_dict)
326 >            common.logger.debug("(test) var_filter for multi =  "+self.var_filter)
327 >
328 >            edmOutput = edmOutputDict.keys()
329 >            if int(self.cfg_params.get('CMSSW.get_edm_output',0)):
330 >                if edmOutput:
331 >                    for outputFile in edmOutput:
332 >                        if outputFile in self.output_file:
333 >                            common.logger.debug("Output from PoolOutputModule "+outputFile+" already in output files")
334 >                        else:
335 >                            self.output_file.append(outputFile)
336 >                            common.logger.info("Adding "+outputFile+" (from PoolOutputModule) to list of output files")
337 >            # not requested, check anyhow to avoid accidental T2 overload
338 >            else:
339 >                if edmOutput:
340 >                    missedFiles = []
341 >                    for outputFile in edmOutput:
342 >                        if outputFile not in self.output_file:
343 >                            missedFiles.append(outputFile)
344 >                    if missedFiles:
345 >                        msg  = "ERROR: PoolOutputModule(s) are present in your ParameteSet %s \n"%self.pset
346 >                        msg += "    but the file(s) produced ( %s ) are not in the list of output files\n" % ', '.join(missedFiles)
347 >                        msg += "WARNING: please remove them. If you want to keep them, add the file(s) to output_files or use CMSSW.get_edm_output = 1\n"
348 >                        if int(self.cfg_params.get('CMSSW.ignore_edm_output',0)):
349 >                            msg += "    CMSSW.ignore_edm_output==1 : Hope you know what you are doing...\n"
350 >                            common.logger.info(msg)
351 >                        else :
352 >                            raise CrabException(msg)
353 >
354 >            if (PsetEdit.getBadFilesSetting()):
355 >                msg = "WARNING: You have set skipBadFiles to True. This will continue processing on some errors and you may not be notified."
356 >                common.logger.info(msg)
357 >
358 >        except CrabException, msg:
359 >            common.logger.info(str(msg))
360 >            msg='Error while manipulating ParameterSet (see previous message, if any): exiting...'
361 >            raise CrabException(msg)
362 >
363 >        valid = re.compile('^[\w\.\-]+$')
364 >        for fileName in self.output_file:
365 >            if not valid.match(fileName):
366 >                msg = "The file %s may only contain alphanumeric characters and -, _, ." % fileName
367 >                raise CrabException(msg)
368 >
369  
370      def DataDiscoveryAndLocation(self, cfg_params):
371  
372          import DataDiscovery
373          import DataLocation
374 <        common.logger.debug(10,"CMSSW::DataDiscoveryAndLocation()")
374 >        common.logger.log(10-1,"CMSSW::DataDiscoveryAndLocation()")
375  
376          datasetPath=self.datasetPath
377  
378          ## Contact the DBS
379 <        common.logger.message("Contacting Data Discovery Services ...")
379 >        common.logger.info("Contacting Data Discovery Services ...")
380          try:
381              self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params,self.skip_blocks)
382              self.pubdata.fetchDBSInfo()
# Line 318 | Line 392 | class Cmssw(JobType):
392              raise CrabException(msg)
393  
394          self.filesbyblock=self.pubdata.getFiles()
395 <        self.eventsbyblock=self.pubdata.getEventsPerBlock()
322 <        self.eventsbyfile=self.pubdata.getEventsPerFile()
323 <        self.parentFiles=self.pubdata.getParent()
395 >        self.conf['pubdata']=self.pubdata
396  
397          ## get max number of events
398          self.maxEvents=self.pubdata.getMaxEvents()
# Line 329 | Line 401 | class Cmssw(JobType):
401          try:
402              dataloc=DataLocation.DataLocation(self.filesbyblock.keys(),cfg_params)
403              dataloc.fetchDLSInfo()
404 +
405          except DataLocation.DataLocationError , ex:
406              msg = 'ERROR ***: failed Data Location in DLS \n %s '%ex.getErrorMessage()
407              raise CrabException(msg)
408  
409  
410 <        sites = dataloc.getSites()
410 >        unsorted_sites = dataloc.getSites()
411 >        sites = self.filesbyblock.fromkeys(self.filesbyblock,'')
412 >        for lfn in self.filesbyblock.keys():
413 >            if unsorted_sites.has_key(lfn):
414 >                sites[lfn]=unsorted_sites[lfn]
415 >            else:
416 >                sites[lfn]=[]
417 >
418 >        if len(sites)==0:
419 >            msg = 'ERROR ***: no location for any of the blocks of this dataset: \n\t %s \n'%datasetPath
420 >            msg += "\tMaybe the dataset is located only at T1's (or at T0), where analysis jobs are not allowed\n"
421 >            msg += "\tPlease check DataDiscovery page https://cmsweb.cern.ch/dbs_discovery/\n"
422 >            raise CrabException(msg)
423 >
424          allSites = []
425          listSites = sites.values()
426          for listSite in listSites:
427              for oneSite in listSite:
428                  allSites.append(oneSite)
429 <        allSites = self.uniquelist(allSites)
344 <
345 <        # screen output
346 <        common.logger.message("Requested dataset: " + datasetPath + " has " + str(self.maxEvents) + " events in " + str(len(self.filesbyblock.keys())) + " blocks.\n")
429 >        [allSites.append(it) for it in allSites if not allSites.count(it)]
430  
348        return sites
349
350    def jobSplittingByBlocks(self, blockSites):
351        """
352        Perform job splitting. Jobs run over an integer number of files
353        and no more than one block.
354        ARGUMENT: blockSites: dictionary with blocks as keys and list of host sites as values
355        REQUIRES: self.selectTotalNumberEvents, self.selectEventsPerJob, self.selectNumberofJobs,
356                  self.total_number_of_events, self.eventsPerJob, self.theNumberOfJobs,
357                  self.maxEvents, self.filesbyblock
358        SETS: self.jobDestination - Site destination(s) for each job (a list of lists)
359              self.total_number_of_jobs - Total # of jobs
360              self.list_of_args - File(s) job will run on (a list of lists)
361        """
362
363        # ---- Handle the possible job splitting configurations ---- #
364        if (self.selectTotalNumberEvents):
365            totalEventsRequested = self.total_number_of_events
366        if (self.selectEventsPerJob):
367            eventsPerJobRequested = self.eventsPerJob
368            if (self.selectNumberOfJobs):
369                totalEventsRequested = self.theNumberOfJobs * self.eventsPerJob
370
371        # If user requested all the events in the dataset
372        if (totalEventsRequested == -1):
373            eventsRemaining=self.maxEvents
374        # If user requested more events than are in the dataset
375        elif (totalEventsRequested > self.maxEvents):
376            eventsRemaining = self.maxEvents
377            common.logger.message("Requested "+str(self.total_number_of_events)+ " events, but only "+str(self.maxEvents)+" events are available.")
378        # If user requested less events than are in the dataset
379        else:
380            eventsRemaining = totalEventsRequested
381
382        # If user requested more events per job than are in the dataset
383        if (self.selectEventsPerJob and eventsPerJobRequested > self.maxEvents):
384            eventsPerJobRequested = self.maxEvents
385
386        # For user info at end
387        totalEventCount = 0
388
389        if (self.selectTotalNumberEvents and self.selectNumberOfJobs):
390            eventsPerJobRequested = int(eventsRemaining/self.theNumberOfJobs)
391
392        if (self.selectNumberOfJobs):
393            common.logger.message("May not create the exact number_of_jobs requested.")
394
395        if ( self.ncjobs == 'all' ) :
396            totalNumberOfJobs = 999999999
397        else :
398            totalNumberOfJobs = self.ncjobs
399
400        blocks = blockSites.keys()
401        blockCount = 0
402        # Backup variable in case self.maxEvents counted events in a non-included block
403        numBlocksInDataset = len(blocks)
404
405        jobCount = 0
406        list_of_lists = []
407
408        # list tracking which jobs are in which jobs belong to which block
409        jobsOfBlock = {}
410
411        # ---- Iterate over the blocks in the dataset until ---- #
412        # ---- we've met the requested total # of events    ---- #
413        while ( (eventsRemaining > 0) and (blockCount < numBlocksInDataset) and (jobCount < totalNumberOfJobs)):
414            block = blocks[blockCount]
415            blockCount += 1
416            if block not in jobsOfBlock.keys() :
417                jobsOfBlock[block] = []
418
419            if self.eventsbyblock.has_key(block) :
420                numEventsInBlock = self.eventsbyblock[block]
421                common.logger.debug(5,'Events in Block File '+str(numEventsInBlock))
422
423                files = self.filesbyblock[block]
424                numFilesInBlock = len(files)
425                if (numFilesInBlock <= 0):
426                    continue
427                fileCount = 0
428
429                # ---- New block => New job ---- #
430                parString = ""
431                # counter for number of events in files currently worked on
432                filesEventCount = 0
433                # flag if next while loop should touch new file
434                newFile = 1
435                # job event counter
436                jobSkipEventCount = 0
437
438                # ---- Iterate over the files in the block until we've met the requested ---- #
439                # ---- total # of events or we've gone over all the files in this block  ---- #
440                pString=''
441                while ( (eventsRemaining > 0) and (fileCount < numFilesInBlock) and (jobCount < totalNumberOfJobs) ):
442                    file = files[fileCount]
443                    if self.useParent:
444                        parent = self.parentFiles[file]
445                        for f in parent :
446                            pString += '\\\"' + f + '\\\"\,'
447                        common.logger.debug(6, "File "+str(file)+" has the following parents: "+str(parent))
448                        common.logger.write("File "+str(file)+" has the following parents: "+str(parent))
449                    if newFile :
450                        try:
451                            numEventsInFile = self.eventsbyfile[file]
452                            common.logger.debug(6, "File "+str(file)+" has "+str(numEventsInFile)+" events")
453                            # increase filesEventCount
454                            filesEventCount += numEventsInFile
455                            # Add file to current job
456                            parString += '\\\"' + file + '\\\"\,'
457                            newFile = 0
458                        except KeyError:
459                            common.logger.message("File "+str(file)+" has unknown number of events: skipping")
460
461                    eventsPerJobRequested = min(eventsPerJobRequested, eventsRemaining)
462                    # if less events in file remain than eventsPerJobRequested
463                    if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested):
464                        # if last file in block
465                        if ( fileCount == numFilesInBlock-1 ) :
466                            # end job using last file, use remaining events in block
467                            # close job and touch new file
468                            fullString = parString[:-2]
469                            if self.useParent:
470                                fullParentString = pString[:-2]
471                                list_of_lists.append([fullString,fullParentString,str(-1),str(jobSkipEventCount)])
472                            else:
473                                list_of_lists.append([fullString,str(-1),str(jobSkipEventCount)])
474                            common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(filesEventCount - jobSkipEventCount)+" events (last file in block).")
475                            self.jobDestination.append(blockSites[block])
476                            common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
477                            # fill jobs of block dictionary
478                            jobsOfBlock[block].append(jobCount+1)
479                            # reset counter
480                            jobCount = jobCount + 1
481                            totalEventCount = totalEventCount + filesEventCount - jobSkipEventCount
482                            eventsRemaining = eventsRemaining - filesEventCount + jobSkipEventCount
483                            jobSkipEventCount = 0
484                            # reset file
485                            pString = ""
486                            parString = ""
487                            filesEventCount = 0
488                            newFile = 1
489                            fileCount += 1
490                        else :
491                            # go to next file
492                            newFile = 1
493                            fileCount += 1
494                    # if events in file equal to eventsPerJobRequested
495                    elif ( filesEventCount - jobSkipEventCount == eventsPerJobRequested ) :
496                        # close job and touch new file
497                        fullString = parString[:-2]
498                        if self.useParent:
499                            fullParentString = pString[:-2]
500                            list_of_lists.append([fullString,fullParentString,str(eventsPerJobRequested),str(jobSkipEventCount)])
501                        else:
502                            list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
503                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
504                        self.jobDestination.append(blockSites[block])
505                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
506                        jobsOfBlock[block].append(jobCount+1)
507                        # reset counter
508                        jobCount = jobCount + 1
509                        totalEventCount = totalEventCount + eventsPerJobRequested
510                        eventsRemaining = eventsRemaining - eventsPerJobRequested
511                        jobSkipEventCount = 0
512                        # reset file
513                        pString = ""
514                        parString = ""
515                        filesEventCount = 0
516                        newFile = 1
517                        fileCount += 1
518
519                    # if more events in file remain than eventsPerJobRequested
520                    else :
521                        # close job but don't touch new file
522                        fullString = parString[:-2]
523                        if self.useParent:
524                            fullParentString = pString[:-2]
525                            list_of_lists.append([fullString,fullParentString,str(eventsPerJobRequested),str(jobSkipEventCount)])
526                        else:
527                            list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
528                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
529                        self.jobDestination.append(blockSites[block])
530                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
531                        jobsOfBlock[block].append(jobCount+1)
532                        # increase counter
533                        jobCount = jobCount + 1
534                        totalEventCount = totalEventCount + eventsPerJobRequested
535                        eventsRemaining = eventsRemaining - eventsPerJobRequested
536                        # calculate skip events for last file
537                        # use filesEventCount (contains several files), jobSkipEventCount and eventsPerJobRequest
538                        jobSkipEventCount = eventsPerJobRequested - (filesEventCount - jobSkipEventCount - self.eventsbyfile[file])
539                        # remove all but the last file
540                        filesEventCount = self.eventsbyfile[file]
541                        if self.useParent:
542                            for f in parent : pString += '\\\"' + f + '\\\"\,'
543                        parString = '\\\"' + file + '\\\"\,'
544                    pass # END if
545                pass # END while (iterate over files in the block)
546        pass # END while (iterate over blocks in the dataset)
547        self.ncjobs = self.total_number_of_jobs = jobCount
548        if (eventsRemaining > 0 and jobCount < totalNumberOfJobs ):
549            common.logger.message("Could not run on all requested events because some blocks not hosted at allowed sites.")
550        common.logger.message(str(jobCount)+" job(s) can run on "+str(totalEventCount)+" events.\n")
431  
432          # screen output
433 <        screenOutput = "List of jobs and available destination sites:\n\n"
434 <
435 <        # keep trace of block with no sites to print a warning at the end
436 <        noSiteBlock = []
437 <        bloskNoSite = []
438 <
439 <        blockCounter = 0
560 <        for block in blocks:
561 <            if block in jobsOfBlock.keys() :
562 <                blockCounter += 1
563 <                screenOutput += "Block %5i: jobs %20s: sites: %s\n" % (blockCounter,spanRanges(jobsOfBlock[block]),
564 <                    ','.join(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)))
565 <                if len(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)) == 0:
566 <                    noSiteBlock.append( spanRanges(jobsOfBlock[block]) )
567 <                    bloskNoSite.append( blockCounter )
568 <
569 <        common.logger.message(screenOutput)
570 <        if len(noSiteBlock) > 0 and len(bloskNoSite) > 0:
571 <            msg = 'WARNING: No sites are hosting any part of data for block:\n                '
572 <            virgola = ""
573 <            if len(bloskNoSite) > 1:
574 <                virgola = ","
575 <            for block in bloskNoSite:
576 <                msg += ' ' + str(block) + virgola
577 <            msg += '\n               Related jobs:\n                 '
578 <            virgola = ""
579 <            if len(noSiteBlock) > 1:
580 <                virgola = ","
581 <            for range_jobs in noSiteBlock:
582 <                msg += str(range_jobs) + virgola
583 <            msg += '\n               will not be submitted and this block of data can not be analyzed!\n'
584 <            if self.cfg_params.has_key('EDG.se_white_list'):
585 <                msg += 'WARNING: SE White List: '+self.cfg_params['EDG.se_white_list']+'\n'
586 <                msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
587 <                msg += 'Please check if the dataset is available at this site!)\n'
588 <            if self.cfg_params.has_key('EDG.ce_white_list'):
589 <                msg += 'WARNING: CE White List: '+self.cfg_params['EDG.ce_white_list']+'\n'
590 <                msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
591 <                msg += 'Please check if the dataset is available at this site!)\n'
592 <
593 <            common.logger.message(msg)
594 <
595 <        self.list_of_args = list_of_lists
596 <        return
597 <
598 <    def jobSplittingNoInput(self):
599 <        """
600 <        Perform job splitting based on number of event per job
601 <        """
602 <        common.logger.debug(5,'Splitting per events')
603 <
604 <        if (self.selectEventsPerJob):
605 <            common.logger.message('Required '+str(self.eventsPerJob)+' events per job ')
606 <        if (self.selectNumberOfJobs):
607 <            common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
608 <        if (self.selectTotalNumberEvents):
609 <            common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
610 <
611 <        if (self.total_number_of_events < 0):
612 <            msg='Cannot split jobs per Events with "-1" as total number of events'
613 <            raise CrabException(msg)
614 <
615 <        if (self.selectEventsPerJob):
616 <            if (self.selectTotalNumberEvents):
617 <                self.total_number_of_jobs = int(self.total_number_of_events/self.eventsPerJob)
618 <            elif(self.selectNumberOfJobs) :
619 <                self.total_number_of_jobs =self.theNumberOfJobs
620 <                self.total_number_of_events =int(self.theNumberOfJobs*self.eventsPerJob)
621 <
622 <        elif (self.selectNumberOfJobs) :
623 <            self.total_number_of_jobs = self.theNumberOfJobs
624 <            self.eventsPerJob = int(self.total_number_of_events/self.total_number_of_jobs)
625 <
626 <        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
627 <
628 <        # is there any remainder?
629 <        check = int(self.total_number_of_events) - (int(self.total_number_of_jobs)*self.eventsPerJob)
630 <
631 <        common.logger.debug(5,'Check  '+str(check))
632 <
633 <        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created, each for '+str(self.eventsPerJob)+' for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
634 <        if check > 0:
635 <            common.logger.message('Warning: asked '+str(self.total_number_of_events)+' but can do only '+str(int(self.total_number_of_jobs)*self.eventsPerJob))
636 <
637 <        # argument is seed number.$i
638 <        self.list_of_args = []
639 <        for i in range(self.total_number_of_jobs):
640 <            ## Since there is no input, any site is good
641 <            self.jobDestination.append([""]) #must be empty to write correctly the xml
642 <            args=[]
643 <            if (self.firstRun):
644 <                ## pythia first run
645 <                args.append(str(self.firstRun)+str(i))
646 <            self.list_of_args.append(args)
647 <
648 <        return
649 <
650 <
651 <    def jobSplittingForScript(self):
652 <        """
653 <        Perform job splitting based on number of job
654 <        """
655 <        common.logger.debug(5,'Splitting per job')
656 <        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
657 <
658 <        self.total_number_of_jobs = self.theNumberOfJobs
659 <
660 <        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
433 >        if self.ads or self.lumiMask:
434 >            common.logger.info("Requested (A)DS %s has %s block(s)." %
435 >                               (datasetPath, len(self.filesbyblock.keys())))
436 >        else:
437 >            common.logger.info("Requested dataset: " + datasetPath + \
438 >                " has " + str(self.maxEvents) + " events in " + \
439 >                str(len(self.filesbyblock.keys())) + " blocks.\n")
440  
441 <        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created')
441 >        return sites
442  
664        # argument is seed number.$i
665        self.list_of_args = []
666        for i in range(self.total_number_of_jobs):
667            self.jobDestination.append([""])
668            self.list_of_args.append([str(i)])
669        return
443  
444      def split(self, jobParams,firstJobID):
445  
446 <        njobs = self.total_number_of_jobs
447 <        arglist = self.list_of_args
446 >        jobParams = self.dict['args']
447 >        njobs = self.dict['njobs']
448 >        self.jobDestination = self.dict['jobDestination']
449 >
450 >        if njobs == 0:
451 >            raise CrabException("Asked to split zero jobs: aborting")
452 >        if not self.server and not self.local and njobs > 500:
453 >            raise CrabException("The CRAB client will not submit more than 500 jobs. You must use the server mode.")
454 >
455          # create the empty structure
456          for i in range(njobs):
457              jobParams.append("")
458  
459          listID=[]
460          listField=[]
461 +        listDictions=[]
462 +        exist= os.path.exists(self.argsFile)
463          for id in range(njobs):
464              job = id + int(firstJobID)
683            jobParams[id] = arglist[id]
465              listID.append(job+1)
466              job_ToSave ={}
467              concString = ' '
468              argu=''
469 +            str_argu = str(job+1)
470              if len(jobParams[id]):
471 <                argu +=   concString.join(jobParams[id] )
472 <            job_ToSave['arguments']= str(job+1)+' '+argu
471 >                argu = {'JobID': job+1}
472 >                for i in range(len(jobParams[id])):
473 >                    argu[self.dict['params'][i]]=jobParams[id][i]
474 >                    if len(jobParams[id])==1: self.NumEvents = jobParams[id][i]
475 >                # just for debug
476 >                str_argu += concString.join(jobParams[id])
477 >            if argu != '': listDictions.append(argu)
478 >            job_ToSave['arguments']= '%d %d'%( (job+1), 0)
479              job_ToSave['dlsDestination']= self.jobDestination[id]
480              listField.append(job_ToSave)
481 <            msg="Job "+str(job)+" Arguments:   "+str(job+1)+" "+argu+"\n"  \
482 <            +"                     Destination: "+str(self.jobDestination[id])
483 <            common.logger.debug(5,msg)
481 >            from ProdCommon.SiteDB.CmsSiteMapper import CmsSEMap
482 >            cms_se = CmsSEMap()
483 >            msg="Job  %s  Arguments:  %s\n"%(str(job+1),str_argu)
484 >            msg+="\t  Destination: %s "%(str(self.jobDestination[id]))
485 >            SEDestination = [cms_se[dest] for dest in self.jobDestination[id]]
486 >            msg+="\t  CMSDestination: %s "%(str(SEDestination))
487 >            common.logger.log(10-1,msg)
488 >        # write xml
489 >        if len(listDictions):
490 >            if exist==False: self.CreateXML()
491 >            self.addEntry(listDictions)
492          common._db.updateJob_(listID,listField)
493 <        self.argsList = (len(jobParams[0])+1)
493 >        return
494 >
495 >    def CreateXML(self):
496 >        """
497 >        """
498 >        result = IMProvNode( self.rootArgsFilename )
499 >        outfile = file( self.argsFile, 'w').write(str(result))
500 >        return
501  
502 +    def addEntry(self, listDictions):
503 +        """
504 +        _addEntry_
505 +
506 +        add an entry to the xml file
507 +        """
508 +        ## load xml
509 +        improvDoc = loadIMProvFile(self.argsFile)
510 +        entrname= 'Job'
511 +        for dictions in listDictions:
512 +           report = IMProvNode(entrname , None, **dictions)
513 +           improvDoc.addNode(report)
514 +        outfile = file( self.argsFile, 'w').write(str(improvDoc))
515          return
516  
517      def numberOfJobs(self):
518 <        return self.total_number_of_jobs
518 > #wmbs
519 >        if self.automation==0:
520 >           return self.dict['njobs']
521 >        else:
522 >           return None
523  
524      def getTarBall(self, exe):
525          """
526          Return the TarBall with lib and exe
527          """
528 <        self.tgzNameWithPath = common.work_space.pathForTgz()+'share/'+self.tgz_name
528 >        self.tgzNameWithPath = common.work_space.pathForTgz()+self.tgz_name
529          if os.path.exists(self.tgzNameWithPath):
530              return self.tgzNameWithPath
531  
# Line 722 | Line 542 | class Cmssw(JobType):
542  
543          ## check if working area is release top
544          if swReleaseTop == '' or swArea == swReleaseTop:
545 <            common.logger.debug(3,"swArea = "+swArea+" swReleaseTop ="+swReleaseTop)
545 >            common.logger.debug("swArea = "+swArea+" swReleaseTop ="+swReleaseTop)
546              return
547  
548          import tarfile
# Line 737 | Line 557 | class Cmssw(JobType):
557                  ## then check if it's private or not
558                  if exeWithPath.find(swReleaseTop) == -1:
559                      # the exe is private, so we must ship
560 <                    common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
560 >                    common.logger.debug("Exe "+exeWithPath+" to be tarred")
561                      path = swArea+'/'
562                      # distinguish case when script is in user project area or given by full path somewhere else
563                      if exeWithPath.find(path) >= 0 :
# Line 751 | Line 571 | class Cmssw(JobType):
571                      pass
572  
573              ## Now get the libraries: only those in local working area
574 +            tar.dereference=True
575              libDir = 'lib'
576              lib = swArea+'/' +libDir
577 <            common.logger.debug(5,"lib "+lib+" to be tarred")
577 >            common.logger.debug("lib "+lib+" to be tarred")
578              if os.path.exists(lib):
579                  tar.add(lib,libDir)
580  
# Line 762 | Line 583 | class Cmssw(JobType):
583              module = swArea + '/' + moduleDir
584              if os.path.isdir(module):
585                  tar.add(module,moduleDir)
586 +            tar.dereference=False
587  
588              ## Now check if any data dir(s) is present
589              self.dataExist = False
# Line 775 | Line 597 | class Cmssw(JobType):
597                      todo_list += [(entryPath + i, i) for i in  os.listdir(swArea+"/src/"+entry)]
598                      if name == 'data':
599                          self.dataExist=True
600 <                        common.logger.debug(5,"data "+entry+" to be tarred")
600 >                        common.logger.debug("data "+entry+" to be tarred")
601                          tar.add(swArea+"/src/"+entry,"src/"+entry)
602                      pass
603                  pass
# Line 783 | Line 605 | class Cmssw(JobType):
605              ### CMSSW ParameterSet
606              if not self.pset is None:
607                  cfg_file = common.work_space.jobDir()+self.configFilename()
608 +                pickleFile = common.work_space.jobDir()+self.configFilename() + '.pkl'
609                  tar.add(cfg_file,self.configFilename())
610 <                common.logger.debug(5,"File added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
610 >                tar.add(pickleFile,self.configFilename() + '.pkl')
611  
612 +            try:
613 +                crab_cfg_file = common.work_space.shareDir()+'/crab.cfg'
614 +                tar.add(crab_cfg_file,'crab.cfg')
615 +            except:
616 +                pass
617  
618              ## Add ProdCommon dir to tar
619              prodcommonDir = './'
620              prodcommonPath = os.environ['CRABDIR'] + '/' + 'external/'
621 <            neededStuff = ['ProdCommon/__init__.py','ProdCommon/FwkJobRep', 'ProdCommon/CMSConfigTools','ProdCommon/Core','ProdCommon/MCPayloads', 'IMProv']
621 >            neededStuff = ['ProdCommon/__init__.py','ProdCommon/FwkJobRep', 'ProdCommon/CMSConfigTools', \
622 >                           'ProdCommon/Core', 'ProdCommon/MCPayloads', 'IMProv', 'ProdCommon/Storage', \
623 >                           'WMCore/__init__.py','WMCore/Algorithms']
624              for file in neededStuff:
625                  tar.add(prodcommonPath+file,prodcommonDir+file)
796            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
626  
627              ##### ML stuff
628              ML_file_list=['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py']
629              path=os.environ['CRABDIR'] + '/python/'
630              for file in ML_file_list:
631                  tar.add(path+file,file)
803            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
632  
633              ##### Utils
634 <            Utils_file_list=['parseCrabFjr.py','writeCfg.py', 'fillCrabFjr.py']
634 >            Utils_file_list=['parseCrabFjr.py','writeCfg.py', 'fillCrabFjr.py','cmscp.py']
635              for file in Utils_file_list:
636                  tar.add(path+file,file)
809            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
637  
638              ##### AdditionalFiles
639 +            tar.dereference=True
640              for file in self.additional_inbox_files:
641                  tar.add(file,string.split(file,'/')[-1])
642 <            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
642 >            tar.dereference=False
643 >            common.logger.log(10-1,"Files in "+self.tgzNameWithPath+" : "+str(tar.getnames()))
644  
645              tar.close()
646 <        except IOError:
647 <            raise CrabException('Could not create tar-ball '+self.tgzNameWithPath)
648 <        except tarfile.TarError:
649 <            raise CrabException('Could not create tar-ball '+self.tgzNameWithPath)
646 >        except IOError, exc:
647 >            msg = 'Could not create tar-ball %s \n'%self.tgzNameWithPath
648 >            msg += str(exc)
649 >            raise CrabException(msg)
650 >        except tarfile.TarError, exc:
651 >            msg = 'Could not create tar-ball %s \n'%self.tgzNameWithPath
652 >            msg += str(exc)
653 >            raise CrabException(msg)
654  
822        ## check for tarball size
655          tarballinfo = os.stat(self.tgzNameWithPath)
656          if ( tarballinfo.st_size > self.MaxTarBallSize*1024*1024 ) :
657 <            raise CrabException('Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) + ' MB input sandbox limit and not supported by the used GRID submission system. Please make sure that no unnecessary files are in all data directories in your local CMSSW project area as they are automatically packed into the input sandbox.')
657 >            if not self.server:
658 >                msg  = 'Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + \
659 >                         str(self.MaxTarBallSize) +'MB input sandbox limit \n'
660 >                msg += '      and not supported by the direct GRID submission system.\n'
661 >                msg += '      Please use the CRAB server mode by setting server_name=<NAME> in section [CRAB] of your crab.cfg.\n'
662 >                msg += '      For further infos please see https://twiki.cern.ch/twiki/bin/view/CMS/SWGuideCrabServerForUsers#Server_available_for_users'
663 >            else:
664 >                msg  = 'Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' +  \
665 >                        str(self.MaxTarBallSize) +'MB input sandbox limit in the server.'
666 >            raise CrabException(msg)
667  
668          ## create tar-ball with ML stuff
669  
# Line 831 | Line 672 | class Cmssw(JobType):
672          Returns part of a job script which prepares
673          the execution environment for the job 'nj'.
674          """
675 <        if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
676 <            psetName = 'pset.py'
836 <        else:
837 <            psetName = 'pset.cfg'
675 >        psetName = 'pset.py'
676 >
677          # Prepare JobType-independent part
678          txt = '\n#Written by cms_cmssw::wsSetupEnvironment\n'
679          txt += 'echo ">>> setup environment"\n'
680 <        txt += 'if [ $middleware == LCG ]; then \n'
680 >        txt += 'echo "set SCRAM ARCH to ' + self.executable_arch + '"\n'
681 >        txt += 'export SCRAM_ARCH=' + self.executable_arch + '\n'
682 >        txt += 'echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
683 >        txt += 'if [ $middleware == LCG ] || [ $middleware == CAF ] || [ $middleware == LSF ]; then \n'
684          txt += self.wsSetupCMSLCGEnvironment_()
685          txt += 'elif [ $middleware == OSG ]; then\n'
686          txt += '    WORKING_DIR=`/bin/mktemp  -d $OSG_WN_TMP/cms_XXXXXXXXXXXX`\n'
# Line 853 | Line 695 | class Cmssw(JobType):
695          txt += '    cd $WORKING_DIR\n'
696          txt += '    echo ">>> current directory (WORKING_DIR): $WORKING_DIR"\n'
697          txt += self.wsSetupCMSOSGEnvironment_()
698 +        #Setup SGE Environment
699 +        txt += 'elif [ $middleware == SGE ]; then\n'
700 +        txt += self.wsSetupCMSLCGEnvironment_()
701 +
702 +        txt += 'elif [ $middleware == ARC ]; then\n'
703 +        txt += self.wsSetupCMSLCGEnvironment_()
704 +
705 +        #Setup PBS Environment
706 +        txt += 'elif [ $middleware == PBS ]; then\n'
707 +        txt += self.wsSetupCMSLCGEnvironment_()
708 +
709          txt += 'fi\n'
710  
711          # Prepare JobType-specific part
# Line 868 | Line 721 | class Cmssw(JobType):
721          txt += '    func_exit\n'
722          txt += 'fi \n'
723          txt += 'cd '+self.version+'\n'
724 <        txt += 'SOFTWARE_DIR=`pwd`\n'
724 >        txt += 'SOFTWARE_DIR=`pwd`; export SOFTWARE_DIR\n'
725          txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
726          txt += 'eval `'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME`\n'
727          txt += 'if [ $? != 0 ] ; then\n'
# Line 879 | Line 732 | class Cmssw(JobType):
732          txt += 'fi \n'
733          # Handle the arguments:
734          txt += "\n"
735 <        txt += "## number of arguments (first argument always jobnumber)\n"
735 >        txt += "## number of arguments (first argument always jobnumber, the second is the resubmission number)\n"
736          txt += "\n"
737          txt += "if [ $nargs -lt "+str(self.argsList)+" ]\n"
738          txt += "then\n"
# Line 895 | Line 748 | class Cmssw(JobType):
748              txt += '\n'
749              txt += 'DatasetPath='+self.datasetPath+'\n'
750  
751 <            datasetpath_split = self.datasetPath.split("/")
752 <
900 <            txt += 'PrimaryDataset='+datasetpath_split[1]+'\n'
901 <            txt += 'DataTier='+datasetpath_split[2]+'\n'
751 >            txt += 'PrimaryDataset='+self.primaryDataset +'\n'
752 >            txt += 'DataTier='+self.dataTier+'\n'
753              txt += 'ApplicationFamily=cmsRun\n'
754  
755          else:
# Line 908 | Line 759 | class Cmssw(JobType):
759              txt += 'ApplicationFamily=MCDataTier\n'
760          if self.pset != None:
761              pset = os.path.basename(job.configFilename())
762 +            pkl  = os.path.basename(job.configFilename()) + '.pkl'
763              txt += '\n'
764              txt += 'cp  $RUNTIME_AREA/'+pset+' .\n'
765 <            if (self.datasetPath): # standard job
914 <                txt += 'InputFiles=${args[1]}; export InputFiles\n'
915 <                if (self.useParent):  
916 <                    txt += 'ParentFiles=${args[2]}; export ParentFiles\n'
917 <                    txt += 'MaxEvents=${args[3]}; export MaxEvents\n'
918 <                    txt += 'SkipEvents=${args[4]}; export SkipEvents\n'
919 <                else:
920 <                    txt += 'MaxEvents=${args[2]}; export MaxEvents\n'
921 <                    txt += 'SkipEvents=${args[3]}; export SkipEvents\n'
922 <                txt += 'echo "Inputfiles:<$InputFiles>"\n'
923 <                if (self.useParent): txt += 'echo "ParentFiles:<$ParentFiles>"\n'
924 <                txt += 'echo "MaxEvents:<$MaxEvents>"\n'
925 <                txt += 'echo "SkipEvents:<$SkipEvents>"\n'
926 <            else:  # pythia like job
927 <                txt += 'PreserveSeeds='  + ','.join(self.preserveSeeds)  + '; export PreserveSeeds\n'
928 <                txt += 'IncrementSeeds=' + ','.join(self.incrementSeeds) + '; export IncrementSeeds\n'
929 <                txt += 'echo "PreserveSeeds: <$PreserveSeeds>"\n'
930 <                txt += 'echo "IncrementSeeds:<$IncrementSeeds>"\n'
931 <                if (self.firstRun):
932 <                    txt += 'FirstRun=${args[1]}; export FirstRun\n'
933 <                    txt += 'echo "FirstRun: <$FirstRun>"\n'
765 >            txt += 'cp  $RUNTIME_AREA/'+pkl+' .\n'
766  
767 <            txt += 'mv -f ' + pset + ' ' + psetName + '\n'
767 >            txt += 'PreserveSeeds='  + ','.join(self.preserveSeeds)  + '; export PreserveSeeds\n'
768 >            txt += 'IncrementSeeds=' + ','.join(self.incrementSeeds) + '; export IncrementSeeds\n'
769 >            txt += 'echo "PreserveSeeds: <$PreserveSeeds>"\n'
770 >            txt += 'echo "IncrementSeeds:<$IncrementSeeds>"\n'
771  
772 <
773 <        if self.pset != None:
774 <            # FUTURE: Can simply for 2_1_x and higher
775 <            txt += '\n'
776 <            if self.debug_wrapper==True:
777 <                txt += 'echo "***** cat ' + psetName + ' *********"\n'
943 <                txt += 'cat ' + psetName + '\n'
944 <                txt += 'echo "****** end ' + psetName + ' ********"\n'
945 <                txt += '\n'
946 <            txt += 'PSETHASH=`edmConfigHash < ' + psetName + '` \n'
947 <            txt += 'echo "PSETHASH = $PSETHASH" \n'
772 >            txt += 'mv -f ' + pset + ' ' + psetName + '\n'
773 >            if self.var_filter:
774 >                #print "self.var_filter = ",self.var_filter
775 >                txt += "export var_filter="+"'"+self.var_filter+"'\n"
776 >                txt += 'echo $var_filter'
777 >        else:
778              txt += '\n'
779 +            if self.AdditionalArgs: txt += 'export AdditionalArgs=\"%s\"\n'%(self.AdditionalArgs)
780 +            if int(self.NumEvents) != 0: txt += 'export MaxEvents=%s\n'%str(self.NumEvents)
781          return txt
782  
783      def wsUntarSoftware(self, nj=0):
# Line 957 | Line 789 | class Cmssw(JobType):
789          txt = '\n#Written by cms_cmssw::wsUntarSoftware\n'
790  
791          if os.path.isfile(self.tgzNameWithPath):
792 <            txt += 'echo ">>> tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+' :" \n'
793 <            txt += 'tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
794 <            if  self.debug_wrapper:
792 >            txt += 'echo ">>> tar xzf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+' :" \n'
793 >            if  self.debug_wrapper==1 :
794 >                txt += 'tar zxvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
795                  txt += 'ls -Al \n'
796 +            else:
797 +                txt += 'tar zxf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
798              txt += 'untar_status=$? \n'
799              txt += 'if [ $untar_status -ne 0 ]; then \n'
800              txt += '   echo "ERROR ==> Untarring .tgz file failed"\n'
# Line 1001 | Line 835 | class Cmssw(JobType):
835          if len(self.additional_inbox_files)>0:
836              for file in self.additional_inbox_files:
837                  txt += 'mv $RUNTIME_AREA/'+os.path.basename(file)+' . \n'
1004        # txt += 'mv $RUNTIME_AREA/ProdCommon/ . \n'
1005        # txt += 'mv $RUNTIME_AREA/IMProv/ . \n'
838  
839          txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
840          txt += 'if [ -z "$PYTHONPATH" ]; then\n'
# Line 1013 | Line 845 | class Cmssw(JobType):
845          txt += 'fi\n'
846          txt += '\n'
847  
848 +        if self.pset != None:
849 +            psetName = 'pset.py'
850 +
851 +            txt += '\n'
852 +            if self.debug_wrapper == 1:
853 +                txt += 'echo "***** cat ' + psetName + ' *********"\n'
854 +                txt += 'cat ' + psetName + '\n'
855 +                txt += 'echo "****** end ' + psetName + ' ********"\n'
856 +                txt += '\n'
857 +                txt += 'echo "***********************" \n'
858 +                txt += 'which edmConfigHash \n'
859 +                txt += 'echo "***********************" \n'
860 +            txt += 'edmConfigHash ' + psetName + ' \n'
861 +            txt += 'PSETHASH=`edmConfigHash ' + psetName + '` \n'
862 +            txt += 'echo "PSETHASH = $PSETHASH" \n'
863 +            #### FEDE temporary fix for noEdm files #####
864 +            txt += 'if [ -z "$PSETHASH" ]; then \n'
865 +            txt += '   export PSETHASH=null\n'
866 +            txt += 'fi \n'
867 +            #############################################
868 +            txt += '\n'
869          return txt
870  
871  
# Line 1023 | Line 876 | class Cmssw(JobType):
876              return self.executable
877  
878      def executableArgs(self):
879 <        # FUTURE: This function tests the CMSSW version. Can be simplified as we drop support for old versions
880 <        if self.scriptExe:#CarlosDaniele
1028 <            return   self.scriptExe + " $NJob"
879 >        if self.scriptExe:
880 >            return self.scriptExe + " $NJob $AdditionalArgs"
881          else:
882 <            ex_args = ""
1031 <            # FUTURE: This tests the CMSSW version. Can remove code as versions deprecated
1032 <            # Framework job report
1033 <            if (self.CMSSW_major >= 1 and self.CMSSW_minor >= 5) or (self.CMSSW_major >= 2):
1034 <                ex_args += " -j $RUNTIME_AREA/crab_fjr_$NJob.xml"
1035 <            # Type of config file
1036 <            if self.CMSSW_major >= 2 :
1037 <                ex_args += " -p pset.py"
1038 <            else:
1039 <                ex_args += " -p pset.cfg"
1040 <            return ex_args
882 >            return " -j $RUNTIME_AREA/crab_fjr_$NJob.xml -p pset.py"
883  
884      def inputSandbox(self, nj):
885          """
# Line 1046 | Line 888 | class Cmssw(JobType):
888          inp_box = []
889          if os.path.isfile(self.tgzNameWithPath):
890              inp_box.append(self.tgzNameWithPath)
891 <        wrapper = os.path.basename(str(common._db.queryTask('scriptName')))
892 <        inp_box.append(common.work_space.pathForTgz() +'job/'+ wrapper)
891 >        if os.path.isfile(self.argsFile):
892 >            inp_box.append(self.argsFile)
893 >        inp_box.append(common.work_space.jobDir() + self.scriptName)
894          return inp_box
895  
896      def outputSandbox(self, nj):
# Line 1071 | Line 914 | class Cmssw(JobType):
914          txt = '\n#Written by cms_cmssw::wsRenameOutput\n'
915          txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
916          txt += 'echo ">>> current directory content:"\n'
917 <        if self.debug_wrapper:
917 >        if self.debug_wrapper==1:
918              txt += 'ls -Al\n'
919          txt += '\n'
920  
921          for fileWithSuffix in (self.output_file):
922 <            output_file_num = numberFile(fileWithSuffix, '$NJob')
922 >            output_file_num = numberFile(fileWithSuffix, '$OutUniqueID')
923              txt += '\n'
924              txt += '# check output file\n'
925              txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n'
# Line 1097 | Line 940 | class Cmssw(JobType):
940              txt += 'fi\n'
941          file_list = []
942          for fileWithSuffix in (self.output_file):
943 <             file_list.append(numberFile(fileWithSuffix, '$NJob'))
943 >             file_list.append(numberFile('$SOFTWARE_DIR/'+fileWithSuffix, '$OutUniqueID'))
944  
945 <        txt += 'file_list="'+string.join(file_list,' ')+'"\n'
945 >        txt += 'file_list="'+string.join(file_list,',')+'"\n'
946          txt += '\n'
947          txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
948          txt += 'echo ">>> current directory content:"\n'
949 <        if self.debug_wrapper:
949 >        if self.debug_wrapper==1:
950              txt += 'ls -Al\n'
951          txt += '\n'
952          txt += 'cd $RUNTIME_AREA\n'
# Line 1125 | Line 968 | class Cmssw(JobType):
968                   '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
969  
970          req = req + ' && (other.GlueHostNetworkAdapterOutboundIP)'
971 <        if common.scheduler.name() == "glitecoll":
972 <            req += ' && other.GlueCEStateStatus == "Production" '
971 >        if ( common.scheduler.name() in ["glite"] ):
972 >            ## 25-Jun-2009 SL: patch to use Cream enabled WMS
973 >            if ( self.cfg_params.get('GRID.use_cream',None) ):
974 >                req += ' && (other.GlueCEStateStatus == "Production" || other.GlueCEStateStatus == "Special")'
975 >            else:
976 >                req += ' && other.GlueCEStateStatus == "Production" '
977  
978          return req
979  
980      def configFilename(self):
981          """ return the config filename """
982 <        # FUTURE: Can remove cfg mode for CMSSW >= 2_1_x
1136 <        if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
1137 <          return self.name()+'.py'
1138 <        else:
1139 <          return self.name()+'.cfg'
982 >        return self.name()+'.py'
983  
984      def wsSetupCMSOSGEnvironment_(self):
985          """
# Line 1196 | Line 1039 | class Cmssw(JobType):
1039          txt += '    echo "==> setup cms environment ok"\n'
1040          return txt
1041  
1042 <    def modifyReport(self, nj):
1042 >    def wsModifyReport(self, nj):
1043          """
1044          insert the part of the script that modifies the FrameworkJob Report
1045          """
1203        txt = '\n#Written by cms_cmssw::modifyReport\n'
1204        publish_data = int(self.cfg_params.get('USER.publish_data',0))
1205        if (publish_data == 1):
1206            processedDataset = self.cfg_params['USER.publish_data_name']
1207            LFNBaseName = LFNBase(processedDataset)
1046  
1047 <            txt += 'if [ $copy_exit_status -eq 0 ]; then\n'
1048 <            txt += '    FOR_LFN=%s_${PSETHASH}/\n'%(LFNBaseName)
1049 <            txt += 'else\n'
1212 <            txt += '    FOR_LFN=/copy_problems/ \n'
1213 <            txt += '    SE=""\n'
1214 <            txt += '    SE_PATH=""\n'
1215 <            txt += 'fi\n'
1047 >        txt = ''
1048 >        if (self.copy_data == 1):
1049 >            txt = '\n#Written by cms_cmssw::wsModifyReport\n'
1050  
1051              txt += 'echo ">>> Modify Job Report:" \n'
1052              txt += 'chmod a+x $RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py\n'
1219            txt += 'ProcessedDataset='+processedDataset+'\n'
1220            txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1221            txt += 'echo "SE = $SE"\n'
1222            txt += 'echo "SE_PATH = $SE_PATH"\n'
1223            txt += 'echo "FOR_LFN = $FOR_LFN" \n'
1053              txt += 'echo "CMSSW_VERSION = $CMSSW_VERSION"\n\n'
1054 <            txt += 'echo "$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
1055 <            txt += '$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
1054 >
1055 >            args = 'fjr $RUNTIME_AREA/crab_fjr_$NJob.xml json $RUNTIME_AREA/resultCopyFile n_job $OutUniqueID PrimaryDataset $PrimaryDataset  ApplicationFamily $ApplicationFamily ApplicationName $executable cmssw_version $CMSSW_VERSION psethash $PSETHASH'
1056 >
1057 >            if (self.publish_data == 1):
1058 >                txt += 'ProcessedDataset='+self.processedDataset+'\n'
1059 >                txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1060 >                args += ' UserProcessedDataset $USER-$ProcessedDataset-$PSETHASH'
1061 >
1062 >            txt += 'echo "$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py '+str(args)+'"\n'
1063 >            txt += '$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py '+str(args)+'\n'
1064              txt += 'modifyReport_result=$?\n'
1065              txt += 'if [ $modifyReport_result -ne 0 ]; then\n'
1066              txt += '    modifyReport_result=70500\n'
# Line 1244 | Line 1081 | class Cmssw(JobType):
1081          txt += 'if [ -s $RUNTIME_AREA/crab_fjr_$NJob.xml ]; then\n'
1082          txt += '    if [ -s $RUNTIME_AREA/parseCrabFjr.py ]; then\n'
1083          txt += '        cmd_out=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --dashboard $MonitorID,$MonitorJobID '+self.debugWrap+'`\n'
1084 <        if self.debug_wrapper :
1084 >        if self.debug_wrapper==1 :
1085              txt += '        echo "Result of parsing the FrameworkJobReport crab_fjr.xml: $cmd_out"\n'
1086          txt += '        executable_exit_status=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --exitcode`\n'
1087          txt += '        if [ $executable_exit_status -eq 50115 ];then\n'
1088          txt += '            echo ">>> crab_fjr.xml contents: "\n'
1089 <        txt += '            cat $RUNTIME_AREA/crab_fjr_NJob.xml\n'
1089 >        txt += '            cat $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
1090          txt += '            echo "Wrong FrameworkJobReport --> does not contain useful info. ExitStatus: $executable_exit_status"\n'
1091          txt += '        elif [ $executable_exit_status -eq -999 ];then\n'
1092          txt += '            echo "ExitStatus from FrameworkJobReport not available. Using exit code of executable from command line."\n'
# Line 1260 | Line 1097 | class Cmssw(JobType):
1097          txt += '        echo "CRAB python script to parse CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1098          txt += '    fi\n'
1099            #### Patch to check input data reading for CMSSW16x Hopefully we'll remove it asap
1100 <
1101 <        if (self.datasetPath and self.dataset_pu == 'NONE'):
1102 <          # VERIFY PROCESSED DATA
1266 <            txt += '    if [ $executable_exit_status -eq 0 ];then\n'
1267 <            txt += '      echo ">>> Verify list of processed files:"\n'
1268 <            txt += '      echo $InputFiles |tr -d \'\\\\\' |tr \',\' \'\\n\'|tr -d \'"\' > input-files.txt\n'
1269 <            txt += '      python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --lfn > processed-files.txt\n'
1270 <            txt += '      cat input-files.txt  | sort | uniq > tmp.txt\n'
1271 <            txt += '      mv tmp.txt input-files.txt\n'
1272 <            txt += '      echo "cat input-files.txt"\n'
1273 <            txt += '      echo "----------------------"\n'
1274 <            txt += '      cat input-files.txt\n'
1275 <            txt += '      cat processed-files.txt | sort | uniq > tmp.txt\n'
1276 <            txt += '      mv tmp.txt processed-files.txt\n'
1277 <            txt += '      echo "----------------------"\n'
1278 <            txt += '      echo "cat processed-files.txt"\n'
1279 <            txt += '      echo "----------------------"\n'
1280 <            txt += '      cat processed-files.txt\n'
1281 <            txt += '      echo "----------------------"\n'
1282 <            txt += '      diff -q input-files.txt processed-files.txt\n'
1283 <            txt += '      fileverify_status=$?\n'
1284 <            txt += '      if [ $fileverify_status -ne 0 ]; then\n'
1285 <            txt += '         executable_exit_status=30001\n'
1286 <            txt += '         echo "ERROR ==> not all input files processed"\n'
1287 <            txt += '         echo "      ==> list of processed files from crab_fjr.xml differs from list in pset.cfg"\n'
1288 <            txt += '         echo "      ==> diff input-files.txt processed-files.txt"\n'
1289 <            txt += '      fi\n'
1290 <            txt += '    fi\n'
1291 <            txt += '\n'
1100 >        txt += '    if [ $executable_exit_status -eq 0 ];then\n'
1101 >        txt += '        echo ">>> Executable succeeded  $executable_exit_status"\n'
1102 >        txt += '    fi\n'
1103          txt += 'else\n'
1104          txt += '    echo "CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1105          txt += 'fi\n'
1106          txt += '\n'
1107 +        txt += 'if [ $executable_exit_status -ne 0 ];then\n'
1108 +        txt += '    echo ">>> Executable failed  $executable_exit_status"\n'
1109 +        txt += '    echo "ExeExitCode=$executable_exit_status" | tee -a $RUNTIME_AREA/$repo\n'
1110 +        txt += '    echo "EXECUTABLE_EXIT_STATUS = $executable_exit_status"\n'
1111 +        txt += '    job_exit_code=$executable_exit_status\n'
1112 +        txt += '    func_exit\n'
1113 +        txt += 'fi\n\n'
1114          txt += 'echo "ExeExitCode=$executable_exit_status" | tee -a $RUNTIME_AREA/$repo\n'
1115          txt += 'echo "EXECUTABLE_EXIT_STATUS = $executable_exit_status"\n'
1116          txt += 'job_exit_code=$executable_exit_status\n'
# Line 1305 | Line 1123 | class Cmssw(JobType):
1123      def getParams(self):
1124          return self._params
1125  
1126 <    def uniquelist(self, old):
1309 <        """
1310 <        remove duplicates from a list
1311 <        """
1312 <        nd={}
1313 <        for e in old:
1314 <            nd[e]=0
1315 <        return nd.keys()
1316 <
1317 <    def outList(self):
1126 >    def outList(self,list=False):
1127          """
1128          check the dimension of the output files
1129          """
# Line 1323 | Line 1132 | class Cmssw(JobType):
1132          listOutFiles = []
1133          stdout = 'CMSSW_$NJob.stdout'
1134          stderr = 'CMSSW_$NJob.stderr'
1135 +        if len(self.output_file) <= 0:
1136 +            msg ="WARNING: no output files name have been defined!!\n"
1137 +            msg+="\tno output files will be reported back/staged\n"
1138 +            common.logger.info(msg)
1139 +
1140          if (self.return_data == 1):
1141 <            for file in (self.output_file+self.output_file_sandbox):
1142 <                listOutFiles.append(numberFile(file, '$NJob'))
1143 <            listOutFiles.append(stdout)
1144 <            listOutFiles.append(stderr)
1145 <        else:
1146 <            for file in (self.output_file_sandbox):
1147 <                listOutFiles.append(numberFile(file, '$NJob'))
1334 <            listOutFiles.append(stdout)
1335 <            listOutFiles.append(stderr)
1141 >            for file in (self.output_file):
1142 >                listOutFiles.append(numberFile(file, '$OutUniqueID'))
1143 >        for file in (self.output_file_sandbox):
1144 >            listOutFiles.append(numberFile(file, '$NJob'))
1145 >        listOutFiles.append(stdout)
1146 >        listOutFiles.append(stderr)
1147 >
1148          txt += 'echo "output files: '+string.join(listOutFiles,' ')+'"\n'
1149          txt += 'filesToCheck="'+string.join(listOutFiles,' ')+'"\n'
1150          txt += 'export filesToCheck\n'
1151 +        taskinfo={}
1152 +        taskinfo['outfileBasename'] = self.output_file
1153 +        common._db.updateTask_(taskinfo)
1154 +
1155 +        if list : return self.output_file
1156          return txt
1157 +
1158 +    def checkCMSSWVersion(self, url = "https://cmstags.cern.ch/cgi-bin/CmsTC/", fileName = "ReleasesXML"):
1159 +        """
1160 +        compare current CMSSW release and arch with allowed releases
1161 +        """
1162 +
1163 +        downloader = Downloader(url)
1164 +        goodRelease = False
1165 +
1166 +        try:
1167 +            result = downloader.config(fileName)
1168 +        except:
1169 +            common.logger.info("ERROR: Problem reading file of allowed CMSSW releases.")
1170 +
1171 +        try:
1172 +            events = pulldom.parseString(result)
1173 +
1174 +            arch     = None
1175 +            release  = None
1176 +            relType  = None
1177 +            relState = None
1178 +            for (event, node) in events:
1179 +                if event == pulldom.START_ELEMENT:
1180 +                    if node.tagName == 'architecture':
1181 +                        arch = node.attributes.getNamedItem('name').nodeValue
1182 +                    if node.tagName == 'project':
1183 +                        relType = node.attributes.getNamedItem('type').nodeValue
1184 +                        relState = node.attributes.getNamedItem('state').nodeValue
1185 +                        if relType == 'Production' and relState == 'Announced':
1186 +                            release = node.attributes.getNamedItem('label').nodeValue
1187 +                if self.executable_arch == arch and self.version == release:
1188 +                    goodRelease = True
1189 +                    return goodRelease
1190 +
1191 +            if not goodRelease:
1192 +                msg = "WARNING: %s on %s is not a supported release. " % \
1193 +                        (self.version, self.executable_arch)
1194 +                msg += "Submission may fail."
1195 +                common.logger.info(msg)
1196 +        except:
1197 +            common.logger.info("Problems parsing file of allowed CMSSW releases.")
1198 +
1199 +        return goodRelease
1200 +

Diff Legend

Removed lines
+ Added lines
< Changed lines
> Changed lines