ViewVC Help
View File | Revision Log | Show Annotations | Root Listing
root/cvsroot/COMP/CRAB/python/cms_cmssw.py
(Generate patch)

Comparing COMP/CRAB/python/cms_cmssw.py (file contents):
Revision 1.231 by spiga, Mon Aug 25 16:13:05 2008 UTC vs.
Revision 1.376 by fanzago, Fri Aug 5 15:36:10 2011 UTC

# Line 1 | Line 1
1 +
2 + __revision__ = "$Id$"
3 + __version__ = "$Revision$"
4 +
5   from JobType import JobType
2 from crab_logger import Logger
6   from crab_exceptions import *
7   from crab_util import *
5 from BlackWhiteListParser import SEBlackWhiteListParser
8   import common
9 + import re
10   import Scram
11 < from LFNBaseName import *
11 > from Splitter import JobSplitter
12 > from Downloader import Downloader
13 > try:
14 >    import json
15 > except:
16 >    import simplejson as json
17  
18 + from IMProv.IMProvNode import IMProvNode
19 + from IMProv.IMProvLoader import loadIMProvFile
20   import os, string, glob
21 + from xml.dom import pulldom
22  
23   class Cmssw(JobType):
24      def __init__(self, cfg_params, ncjobs,skip_blocks, isNew):
25          JobType.__init__(self, 'CMSSW')
26 <        common.logger.debug(3,'CMSSW::__init__')
26 >        common.logger.debug('CMSSW::__init__')
27          self.skip_blocks = skip_blocks
28 <
29 <        self.argsList = []
19 <
28 >        self.argsList = 2
29 >        self.NumEvents=0
30          self._params = {}
31          self.cfg_params = cfg_params
32 <        # init BlackWhiteListParser
33 <        self.blackWhiteListParser = SEBlackWhiteListParser(cfg_params)
32 >        ### FOR MULTI ###
33 >        self.var_filter=''
34  
35 <        self.MaxTarBallSize = float(self.cfg_params.get('EDG.maxtarballsize',9.5))
35 >        ### Temporary patch to automatically skip the ISB size check:
36 >        self.server = self.cfg_params.get('CRAB.server_name',None) or \
37 >                      self.cfg_params.get('CRAB.use_server',0)
38 >        self.local  = common.scheduler.name().upper() in ['LSF','CAF','CONDOR','SGE','PBS']
39 >        size = 9.5
40 >        if self.server :
41 >            size = 100
42 >        elif self.local:
43 >            size = 9999999
44 >        self.MaxTarBallSize = float(self.cfg_params.get('GRID.maxtarballsize',size))
45  
46          # number of jobs requested to be created, limit obj splitting
47          self.ncjobs = ncjobs
48  
30        log = common.logger
31
49          self.scram = Scram.Scram(cfg_params)
50          self.additional_inbox_files = []
51          self.scriptExe = ''
# Line 39 | Line 56 | class Cmssw(JobType):
56          self.pset = ''
57          self.datasetPath = ''
58  
59 +        self.tgzNameWithPath = common.work_space.pathForTgz()+self.tgz_name
60          # set FJR file name
61          self.fjrFileName = 'crab_fjr.xml'
62  
63          self.version = self.scram.getSWVersion()
64 +        common.logger.log(10-1,"CMSSW version is: "+str(self.version))
65          version_array = self.version.split('_')
66          self.CMSSW_major = 0
67          self.CMSSW_minor = 0
# Line 55 | Line 74 | class Cmssw(JobType):
74              msg = "Cannot parse CMSSW version string: " + self.version + " for major and minor release number!"
75              raise CrabException(msg)
76  
77 <        ### collect Data cards
78 <
60 <        if not cfg_params.has_key('CMSSW.datasetpath'):
61 <            msg = "Error: datasetpath not defined "
77 >        if self.CMSSW_major < 2 or (self.CMSSW_major == 2 and self.CMSSW_minor < 1):
78 >            msg = "CRAB supports CMSSW >= 2_1_x only. Use an older CRAB version."
79              raise CrabException(msg)
80 +            """
81 +            As CMSSW versions are dropped we can drop more code:
82 +            2.x dropped: drop check for lumi range setting
83 +            """
84 +        self.checkCMSSWVersion()
85 +        ### collect Data cards
86  
87          ### Temporary: added to remove input file control in the case of PU
88          self.dataset_pu = cfg_params.get('CMSSW.dataset_pu', None)
89  
90 <        tmp =  cfg_params['CMSSW.datasetpath']
91 <        log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
92 <        if string.lower(tmp)=='none':
93 <            self.datasetPath = None
94 <            self.selectNoInput = 1
95 <        else:
96 <            self.datasetPath = tmp
97 <            self.selectNoInput = 0
90 >        if not cfg_params.has_key('CMSSW.datasetpath'):
91 >            msg = "Error: datasetpath not defined in the section [CMSSW] of crab.cfg file "
92 >            raise CrabException(msg)
93 >        else:    
94 >            tmp =  cfg_params['CMSSW.datasetpath']
95 >            common.logger.log(10-1, "CMSSW::CMSSW(): datasetPath = "+tmp)
96 >            if string.lower(tmp)=='none':
97 >                self.datasetPath = None
98 >                self.selectNoInput = 1
99 >                self.primaryDataset = 'null'
100 >            else:
101 >                self.datasetPath = tmp
102 >                self.selectNoInput = 0
103 >                ll = len(self.datasetPath.split("/"))
104 >                if (ll < 4):
105 >                    msg = 'Your datasetpath has an invalid format ' + self.datasetPath + '\n'
106 >                    msg += 'Expected a path in format /PRIMARY/PROCESSED/TIER1-TIER2 or /PRIMARY/PROCESSED/TIER/METHOD for ADS'
107 >                    raise CrabException(msg)
108 >                self.primaryDataset = self.datasetPath.split("/")[1]
109 >                self.dataTier = self.datasetPath.split("/")[2]
110 >
111 >        # Analysis dataset is primary/processed/tier/definition
112 >        self.ads = False
113 >        if self.datasetPath:
114 >            self.ads = len(self.datasetPath.split("/")) > 4
115 >        self.lumiMask = self.cfg_params.get('CMSSW.lumi_mask',None)
116 >        self.lumiParams = self.cfg_params.get('CMSSW.total_number_of_lumis',None) or \
117 >                          self.cfg_params.get('CMSSW.lumis_per_job',None)
118 >
119 >        # FUTURE: Can remove this check
120 >        if self.ads and self.CMSSW_major < 3:
121 >            common.logger.info('Warning: Analysis dataset support is incomplete in CMSSW 2_x.')
122 >            common.logger.info('  Only file level, not lumi level, granularity is supported.')
123 >
124 >        self.debugWrap=''
125 >        self.debug_wrapper = int(cfg_params.get('USER.debug_wrapper',0))
126 >        if self.debug_wrapper == 1: self.debugWrap='--debug'
127  
76        self.dataTiers = []
77        self.debugWrap = ''
78        self.debug_wrapper = cfg_params.get('USER.debug_wrapper',False)
79        if self.debug_wrapper: self.debugWrap='--debug'
128          ## now the application
129 +        self.managedGenerators = ['madgraph', 'comphep', 'lhe']
130 +        self.generator = cfg_params.get('CMSSW.generator','pythia').lower()
131          self.executable = cfg_params.get('CMSSW.executable','cmsRun')
132 <        log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
132 >        common.logger.log(10-1, "CMSSW::CMSSW(): executable = "+self.executable)
133  
134          if not cfg_params.has_key('CMSSW.pset'):
135              raise CrabException("PSet file missing. Cannot run cmsRun ")
136          self.pset = cfg_params['CMSSW.pset']
137 <        log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
137 >        common.logger.log(10-1, "Cmssw::Cmssw(): PSet file = "+self.pset)
138          if self.pset.lower() != 'none' :
139              if (not os.path.exists(self.pset)):
140                  raise CrabException("User defined PSet file "+self.pset+" does not exist")
# Line 105 | Line 155 | class Cmssw(JobType):
155          if tmp :
156              self.output_file = [x.strip() for x in tmp.split(',')]
157              outfileflag = True #output found
108        #else:
109        #    log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
158  
111        # script_exe file as additional file in inputSandbox
159          self.scriptExe = cfg_params.get('USER.script_exe',None)
160          if self.scriptExe :
161              if not os.path.isfile(self.scriptExe):
# Line 116 | Line 163 | class Cmssw(JobType):
163                  raise CrabException(msg)
164              self.additional_inbox_files.append(string.strip(self.scriptExe))
165  
166 +        self.AdditionalArgs = cfg_params.get('USER.script_arguments',None)
167 +        if self.AdditionalArgs : self.AdditionalArgs = string.replace(self.AdditionalArgs,',',' ')
168 +
169          if self.datasetPath == None and self.pset == None and self.scriptExe == '' :
170              msg ="Error. script_exe  not defined"
171              raise CrabException(msg)
172  
173          # use parent files...
174 <        self.useParent = self.cfg_params.get('CMSSW.use_parent',False)
174 >        self.useParent = int(self.cfg_params.get('CMSSW.use_parent',0))
175  
176          ## additional input files
177          if cfg_params.has_key('USER.additional_input_files'):
# Line 144 | Line 194 | class Cmssw(JobType):
194                      self.additional_inbox_files.append(string.strip(file))
195                  pass
196              pass
197 <            common.logger.debug(5,"Additional input files: "+str(self.additional_inbox_files))
197 >            common.logger.debug("Additional input files: "+str(self.additional_inbox_files))
198          pass
199  
150        ## Events per job
151        if cfg_params.has_key('CMSSW.events_per_job'):
152            self.eventsPerJob =int( cfg_params['CMSSW.events_per_job'])
153            self.selectEventsPerJob = 1
154        else:
155            self.eventsPerJob = -1
156            self.selectEventsPerJob = 0
157
158        ## number of jobs
159        if cfg_params.has_key('CMSSW.number_of_jobs'):
160            self.theNumberOfJobs =int( cfg_params['CMSSW.number_of_jobs'])
161            self.selectNumberOfJobs = 1
162        else:
163            self.theNumberOfJobs = 0
164            self.selectNumberOfJobs = 0
165
166        if cfg_params.has_key('CMSSW.total_number_of_events'):
167            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
168            self.selectTotalNumberEvents = 1
169            if self.selectNumberOfJobs  == 1:
170                if (self.total_number_of_events != -1) and int(self.total_number_of_events) < int(self.theNumberOfJobs):
171                    msg = 'Must specify at least one event per job. total_number_of_events > number_of_jobs '
172                    raise CrabException(msg)
173        else:
174            self.total_number_of_events = 0
175            self.selectTotalNumberEvents = 0
176
177        if self.pset != None:
178             if ( (self.selectTotalNumberEvents + self.selectEventsPerJob + self.selectNumberOfJobs) != 2 ):
179                 msg = 'Must define exactly two of total_number_of_events, events_per_job, or number_of_jobs.'
180                 raise CrabException(msg)
181        else:
182             if (self.selectNumberOfJobs == 0):
183                 msg = 'Must specify  number_of_jobs.'
184                 raise CrabException(msg)
200  
201          ## New method of dealing with seeds
202          self.incrementSeeds = []
# Line 197 | Line 212 | class Cmssw(JobType):
212                  tmp.strip()
213                  self.incrementSeeds.append(tmp)
214  
215 <        ## FUTURE: Can remove in CRAB 2.4.0
201 <        self.sourceSeed    = cfg_params.get('CMSSW.pythia_seed',None)
202 <        self.sourceSeedVtx = cfg_params.get('CMSSW.vtx_seed',None)
203 <        self.sourceSeedG4  = cfg_params.get('CMSSW.g4_seed',None)
204 <        self.sourceSeedMix = cfg_params.get('CMSSW.mix_seed',None)
205 <        if self.sourceSeed or self.sourceSeedVtx or self.sourceSeedG4 or self.sourceSeedMix:
206 <            msg = 'pythia_seed, vtx_seed, g4_seed, and mix_seed are no longer valid settings. You must use increment_seeds or preserve_seeds'
207 <            raise CrabException(msg)
208 <
209 <        self.firstRun = cfg_params.get('CMSSW.first_run',None)
210 <
211 <        # Copy/return
215 >        # Copy/return/publish
216          self.copy_data = int(cfg_params.get('USER.copy_data',0))
217          self.return_data = int(cfg_params.get('USER.return_data',0))
218 +        self.publish_data = int(cfg_params.get('USER.publish_data',0))
219 +        if (self.publish_data == 1):
220 +            if not cfg_params.has_key('USER.publish_data_name'):
221 +                raise CrabException('Cannot publish output data, because you did not specify USER.publish_data_name parameter in the crab.cfg file')
222 +            else:
223 +                self.processedDataset = cfg_params['USER.publish_data_name']
224  
225 +        self.conf = {}
226 +        self.conf['pubdata'] = None
227 +        # number of jobs requested to be created, limit obj splitting DD
228          #DBSDLS-start
229          ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
230          self.maxEvents=0  # max events available   ( --> check the requested nb. of evts in Creator.py)
# Line 220 | Line 233 | class Cmssw(JobType):
233          ## Perform the data location and discovery (based on DBS/DLS)
234          ## SL: Don't if NONE is specified as input (pythia use case)
235          blockSites = {}
236 <        if self.datasetPath:
237 <            blockSites = self.DataDiscoveryAndLocation(cfg_params)
238 <        #DBSDLS-end
236 > #wmbs
237 >        self.automation = int(self.cfg_params.get('WMBS.automation',0))
238 >        if self.automation == 0:
239 >            if self.datasetPath:
240 >                blockSites = self.DataDiscoveryAndLocation(cfg_params)
241 >            #DBSDLS-end
242 >            self.conf['blockSites']=blockSites
243 >
244 >            ## Select Splitting
245 >            splitByRun = int(cfg_params.get('CMSSW.split_by_run',0))
246 >
247 >            if self.selectNoInput:
248 >                if self.pset == None:
249 >                    self.algo = 'ForScript'
250 >                else:
251 >                    self.algo = 'NoInput'
252 >                    self.conf['managedGenerators']=self.managedGenerators
253 >                    self.conf['generator']=self.generator
254 >            elif self.ads or self.lumiMask or self.lumiParams:
255 >                self.algo = 'LumiBased'
256 >                if splitByRun:
257 >                    msg = "Cannot combine split by run with lumi_mask, ADS, " \
258 >                          "or lumis_per_job. Use split by lumi mode instead."
259 >                    raise CrabException(msg)
260  
261 <        ## Select Splitting
262 <        if self.selectNoInput:
229 <            if self.pset == None:
230 <                self.jobSplittingForScript()
261 >            elif splitByRun ==1:
262 >                self.algo = 'RunBased'
263              else:
264 <                self.jobSplittingNoInput()
265 <        else:
234 <            self.jobSplittingByBlocks(blockSites)
264 >                self.algo = 'EventBased'
265 >            common.logger.debug("Job splitting method: %s" % self.algo)
266  
267 +            splitter = JobSplitter(self.cfg_params,self.conf)
268 +            self.dict = splitter.Algos()[self.algo]()
269 +
270 +        self.argsFile= '%s/arguments.xml'%common.work_space.shareDir()
271 +        self.rootArgsFilename= 'arguments'
272          # modify Pset only the first time
273          if isNew:
274 <            if self.pset != None:
275 <                import PsetManipulator as pp
240 <                PsetEdit = pp.PsetManipulator(self.pset)
241 <                try:
242 <                    # Add FrameworkJobReport to parameter-set, set max events.
243 <                    # Reset later for data jobs by writeCFG which does all modifications
244 <                    PsetEdit.addCrabFJR(self.fjrFileName) # FUTURE: Job report addition not needed by CMSSW>1.5
245 <                    PsetEdit.maxEvent(self.eventsPerJob)
246 <                    PsetEdit.psetWriter(self.configFilename())
247 <                    ## If present, add TFileService to output files
248 <                    if not int(cfg_params.get('CMSSW.skip_TFileService_output',0)):
249 <                        tfsOutput = PsetEdit.getTFileService()
250 <                        if tfsOutput:
251 <                            if tfsOutput in self.output_file:
252 <                                common.logger.debug(5,"Output from TFileService "+tfsOutput+" already in output files")
253 <                            else:
254 <                                outfileflag = True #output found
255 <                                self.output_file.append(tfsOutput)
256 <                                common.logger.message("Adding "+tfsOutput+" to output files (from TFileService)")
257 <                            pass
258 <                        pass
259 <                    ## If present and requested, add PoolOutputModule to output files
260 <                    if int(cfg_params.get('CMSSW.get_edm_output',0)):
261 <                        edmOutput = PsetEdit.getPoolOutputModule()
262 <                        if edmOutput:
263 <                            if edmOutput in self.output_file:
264 <                                common.logger.debug(5,"Output from PoolOutputModule "+edmOutput+" already in output files")
265 <                            else:
266 <                                self.output_file.append(edmOutput)
267 <                                common.logger.message("Adding "+edmOutput+" to output files (from PoolOutputModule)")
268 <                            pass
269 <                        pass
270 <                except CrabException:
271 <                    msg='Error while manipulating ParameterSet: exiting...'
272 <                    raise CrabException(msg)
274 >            if self.pset != None: self.ModifyPset()
275 >
276              ## Prepare inputSandbox TarBall (only the first time)
277 <            self.tgzNameWithPath = self.getTarBall(self.executable)
277 >            self.tarNameWithPath = self.getTarBall(self.executable)
278 >
279 >
280 >    def ModifyPset(self):
281 >        import PsetManipulator as pp
282 >
283 >        # If pycfg_params set, fake out the config script
284 >        # to make it think it was called with those args
285 >        pycfg_params = self.cfg_params.get('CMSSW.pycfg_params',None)
286 >        if pycfg_params:
287 >            trueArgv = sys.argv
288 >            sys.argv = [self.pset]
289 >            sys.argv.extend(pycfg_params.split(' '))
290 >        PsetEdit = pp.PsetManipulator(self.pset)
291 >        if pycfg_params: # Restore original sys.argv
292 >            sys.argv = trueArgv
293 >
294 >        try:
295 >            # Add FrameworkJobReport to parameter-set, set max events.
296 >            # Reset later for data jobs by writeCFG which does all modifications
297 >            PsetEdit.maxEvent(1)
298 >            PsetEdit.skipEvent(0)
299 >            PsetEdit.psetWriter(self.configFilename())
300 >            ## If present, add TFileService to output files
301 >            if not int(self.cfg_params.get('CMSSW.skip_tfileservice_output',0)):
302 >                tfsOutput = PsetEdit.getTFileService()
303 >                if tfsOutput:
304 >                    if tfsOutput in self.output_file:
305 >                        common.logger.debug("Output from TFileService "+tfsOutput+" already in output files")
306 >                    else:
307 >                        outfileflag = True #output found
308 >                        self.output_file.append(tfsOutput)
309 >                        common.logger.info("Adding "+tfsOutput+" (from TFileService) to list of output files")
310 >                    pass
311 >                pass
312 >
313 >            # If requested, add PoolOutputModule to output files
314 >            ### FOR MULTI ###
315 >            #edmOutput = PsetEdit.getPoolOutputModule()
316 >            edmOutputDict = PsetEdit.getPoolOutputModule()
317 >            common.logger.debug("(test) edmOutputDict = "+str(edmOutputDict))
318 >            filter_dict = {}
319 >            for key in edmOutputDict.keys():
320 >                filter_dict[key]=edmOutputDict[key]['dataset']
321 >            common.logger.debug("(test) filter_dict for multi =  "+str(filter_dict))
322 >
323 >            #### in CMSSW.sh: export var_filter
324 >
325 >            self.var_filter = json.dumps(filter_dict)
326 >            common.logger.debug("(test) var_filter for multi =  "+self.var_filter)
327 >
328 >            edmOutput = edmOutputDict.keys()
329 >            if int(self.cfg_params.get('CMSSW.get_edm_output',0)):
330 >                if edmOutput:
331 >                    for outputFile in edmOutput:
332 >                        if outputFile in self.output_file:
333 >                            common.logger.debug("Output from PoolOutputModule "+outputFile+" already in output files")
334 >                        else:
335 >                            self.output_file.append(outputFile)
336 >                            common.logger.info("Adding "+outputFile+" (from PoolOutputModule) to list of output files")
337 >            # not requested, check anyhow to avoid accidental T2 overload
338 >            else:
339 >                if edmOutput:
340 >                    missedFiles = []
341 >                    for outputFile in edmOutput:
342 >                        if outputFile not in self.output_file:
343 >                            missedFiles.append(outputFile)
344 >                    if missedFiles:
345 >                        msg  = "ERROR: PoolOutputModule(s) are present in your ParameterSet %s \n"%self.pset
346 >                        msg += "    but the file(s) produced ( %s ) are not in the list of output files\n" % ', '.join(missedFiles)
347 >                        msg += "WARNING: please remove them. If you want to keep them, add the file(s) to output_files or use CMSSW.get_edm_output = 1\n"
348 >                        if int(self.cfg_params.get('CMSSW.ignore_edm_output',0)):
349 >                            msg += "    CMSSW.ignore_edm_output==1 : Hope you know what you are doing...\n"
350 >                            common.logger.info(msg)
351 >                        else :
352 >                            raise CrabException(msg)
353 >
354 >            if (PsetEdit.getBadFilesSetting()):
355 >                msg = "WARNING: You have set skipBadFiles to True. This will continue processing on some errors and you may not be notified."
356 >                common.logger.info(msg)
357 >
358 >        except CrabException, msg:
359 >            common.logger.info(str(msg))
360 >            msg='Error while manipulating ParameterSet (see previous message, if any): exiting...'
361 >            raise CrabException(msg)
362 >
363 >        valid = re.compile('^[\w\.\-]+$')
364 >        for fileName in self.output_file:
365 >            if not valid.match(fileName):
366 >                msg = "The file %s may only contain alphanumeric characters and -, _, ." % fileName
367 >                raise CrabException(msg)
368 >
369  
370      def DataDiscoveryAndLocation(self, cfg_params):
371  
372          import DataDiscovery
373          import DataLocation
374 <        common.logger.debug(10,"CMSSW::DataDiscoveryAndLocation()")
374 >        common.logger.log(10-1,"CMSSW::DataDiscoveryAndLocation()")
375  
376          datasetPath=self.datasetPath
377  
378          ## Contact the DBS
379 <        common.logger.message("Contacting Data Discovery Services ...")
379 >        common.logger.info("Contacting Data Discovery Services ...")
380          try:
381              self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params,self.skip_blocks)
382              self.pubdata.fetchDBSInfo()
# Line 298 | Line 392 | class Cmssw(JobType):
392              raise CrabException(msg)
393  
394          self.filesbyblock=self.pubdata.getFiles()
395 <        self.eventsbyblock=self.pubdata.getEventsPerBlock()
302 <        self.eventsbyfile=self.pubdata.getEventsPerFile()
303 <        self.parentFiles=self.pubdata.getParent()
395 >        self.conf['pubdata']=self.pubdata
396  
397          ## get max number of events
398          self.maxEvents=self.pubdata.getMaxEvents()
# Line 309 | Line 401 | class Cmssw(JobType):
401          try:
402              dataloc=DataLocation.DataLocation(self.filesbyblock.keys(),cfg_params)
403              dataloc.fetchDLSInfo()
404 +
405          except DataLocation.DataLocationError , ex:
406              msg = 'ERROR ***: failed Data Location in DLS \n %s '%ex.getErrorMessage()
407              raise CrabException(msg)
408  
409  
410 <        sites = dataloc.getSites()
410 >        unsorted_sites = dataloc.getSites()
411 >        sites = self.filesbyblock.fromkeys(self.filesbyblock,'')
412 >        for lfn in self.filesbyblock.keys():
413 >            if unsorted_sites.has_key(lfn):
414 >                sites[lfn]=unsorted_sites[lfn]
415 >            else:
416 >                sites[lfn]=[]
417 >
418 >        if len(sites)==0:
419 >            msg = 'ERROR ***: no location for any of the blocks of this dataset: \n\t %s \n'%datasetPath
420 >            msg += "\tMaybe the dataset is located only at T1's (or at T0), where analysis jobs are not allowed\n"
421 >            msg += "\tPlease check DataDiscovery page https://cmsweb.cern.ch/dbs_discovery/\n"
422 >            raise CrabException(msg)
423 >
424          allSites = []
425          listSites = sites.values()
426          for listSite in listSites:
427              for oneSite in listSite:
428                  allSites.append(oneSite)
429 <        allSites = self.uniquelist(allSites)
324 <
325 <        # screen output
326 <        common.logger.message("Requested dataset: " + datasetPath + " has " + str(self.maxEvents) + " events in " + str(len(self.filesbyblock.keys())) + " blocks.\n")
327 <
328 <        return sites
329 <
330 <    def jobSplittingByBlocks(self, blockSites):
331 <        """
332 <        Perform job splitting. Jobs run over an integer number of files
333 <        and no more than one block.
334 <        ARGUMENT: blockSites: dictionary with blocks as keys and list of host sites as values
335 <        REQUIRES: self.selectTotalNumberEvents, self.selectEventsPerJob, self.selectNumberofJobs,
336 <                  self.total_number_of_events, self.eventsPerJob, self.theNumberOfJobs,
337 <                  self.maxEvents, self.filesbyblock
338 <        SETS: self.jobDestination - Site destination(s) for each job (a list of lists)
339 <              self.total_number_of_jobs - Total # of jobs
340 <              self.list_of_args - File(s) job will run on (a list of lists)
341 <        """
342 <
343 <        # ---- Handle the possible job splitting configurations ---- #
344 <        if (self.selectTotalNumberEvents):
345 <            totalEventsRequested = self.total_number_of_events
346 <        if (self.selectEventsPerJob):
347 <            eventsPerJobRequested = self.eventsPerJob
348 <            if (self.selectNumberOfJobs):
349 <                totalEventsRequested = self.theNumberOfJobs * self.eventsPerJob
350 <
351 <        # If user requested all the events in the dataset
352 <        if (totalEventsRequested == -1):
353 <            eventsRemaining=self.maxEvents
354 <        # If user requested more events than are in the dataset
355 <        elif (totalEventsRequested > self.maxEvents):
356 <            eventsRemaining = self.maxEvents
357 <            common.logger.message("Requested "+str(self.total_number_of_events)+ " events, but only "+str(self.maxEvents)+" events are available.")
358 <        # If user requested less events than are in the dataset
359 <        else:
360 <            eventsRemaining = totalEventsRequested
361 <
362 <        # If user requested more events per job than are in the dataset
363 <        if (self.selectEventsPerJob and eventsPerJobRequested > self.maxEvents):
364 <            eventsPerJobRequested = self.maxEvents
365 <
366 <        # For user info at end
367 <        totalEventCount = 0
368 <
369 <        if (self.selectTotalNumberEvents and self.selectNumberOfJobs):
370 <            eventsPerJobRequested = int(eventsRemaining/self.theNumberOfJobs)
371 <
372 <        if (self.selectNumberOfJobs):
373 <            common.logger.message("May not create the exact number_of_jobs requested.")
374 <
375 <        if ( self.ncjobs == 'all' ) :
376 <            totalNumberOfJobs = 999999999
377 <        else :
378 <            totalNumberOfJobs = self.ncjobs
379 <
380 <        blocks = blockSites.keys()
381 <        blockCount = 0
382 <        # Backup variable in case self.maxEvents counted events in a non-included block
383 <        numBlocksInDataset = len(blocks)
384 <
385 <        jobCount = 0
386 <        list_of_lists = []
387 <
388 <        # list tracking which jobs are in which jobs belong to which block
389 <        jobsOfBlock = {}
390 <
391 <        # ---- Iterate over the blocks in the dataset until ---- #
392 <        # ---- we've met the requested total # of events    ---- #
393 <        while ( (eventsRemaining > 0) and (blockCount < numBlocksInDataset) and (jobCount < totalNumberOfJobs)):
394 <            block = blocks[blockCount]
395 <            blockCount += 1
396 <            if block not in jobsOfBlock.keys() :
397 <                jobsOfBlock[block] = []
398 <
399 <            if self.eventsbyblock.has_key(block) :
400 <                numEventsInBlock = self.eventsbyblock[block]
401 <                common.logger.debug(5,'Events in Block File '+str(numEventsInBlock))
402 <
403 <                files = self.filesbyblock[block]
404 <                numFilesInBlock = len(files)
405 <                if (numFilesInBlock <= 0):
406 <                    continue
407 <                fileCount = 0
429 >        [allSites.append(it) for it in allSites if not allSites.count(it)]
430  
409                # ---- New block => New job ---- #
410                parString = ""
411                # counter for number of events in files currently worked on
412                filesEventCount = 0
413                # flag if next while loop should touch new file
414                newFile = 1
415                # job event counter
416                jobSkipEventCount = 0
417
418                # ---- Iterate over the files in the block until we've met the requested ---- #
419                # ---- total # of events or we've gone over all the files in this block  ---- #
420                pString=''
421                while ( (eventsRemaining > 0) and (fileCount < numFilesInBlock) and (jobCount < totalNumberOfJobs) ):
422                    file = files[fileCount]
423                    if self.useParent:
424                        parent = self.parentFiles[file]
425                        for f in parent :
426                            pString += '\\\"' + f + '\\\"\,'
427                        common.logger.debug(6, "File "+str(file)+" has the following parents: "+str(parent))
428                        common.logger.write("File "+str(file)+" has the following parents: "+str(parent))
429                    if newFile :
430                        try:
431                            numEventsInFile = self.eventsbyfile[file]
432                            common.logger.debug(6, "File "+str(file)+" has "+str(numEventsInFile)+" events")
433                            # increase filesEventCount
434                            filesEventCount += numEventsInFile
435                            # Add file to current job
436                            parString += '\\\"' + file + '\\\"\,'
437                            newFile = 0
438                        except KeyError:
439                            common.logger.message("File "+str(file)+" has unknown number of events: skipping")
440
441                    eventsPerJobRequested = min(eventsPerJobRequested, eventsRemaining)
442                    # if less events in file remain than eventsPerJobRequested
443                    if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested):
444                        # if last file in block
445                        if ( fileCount == numFilesInBlock-1 ) :
446                            # end job using last file, use remaining events in block
447                            # close job and touch new file
448                            fullString = parString[:-2]
449                            if self.useParent:
450                                fullParentString = pString[:-2]
451                                list_of_lists.append([fullString,fullParentString,str(-1),str(jobSkipEventCount)])
452                            else:
453                                list_of_lists.append([fullString,str(-1),str(jobSkipEventCount)])
454                            common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(filesEventCount - jobSkipEventCount)+" events (last file in block).")
455                            self.jobDestination.append(blockSites[block])
456                            common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
457                            # fill jobs of block dictionary
458                            jobsOfBlock[block].append(jobCount+1)
459                            # reset counter
460                            jobCount = jobCount + 1
461                            totalEventCount = totalEventCount + filesEventCount - jobSkipEventCount
462                            eventsRemaining = eventsRemaining - filesEventCount + jobSkipEventCount
463                            jobSkipEventCount = 0
464                            # reset file
465                            pString = ""
466                            parString = ""
467                            filesEventCount = 0
468                            newFile = 1
469                            fileCount += 1
470                        else :
471                            # go to next file
472                            newFile = 1
473                            fileCount += 1
474                    # if events in file equal to eventsPerJobRequested
475                    elif ( filesEventCount - jobSkipEventCount == eventsPerJobRequested ) :
476                        # close job and touch new file
477                        fullString = parString[:-2]
478                        if self.useParent:
479                            fullParentString = pString[:-2]
480                            list_of_lists.append([fullString,fullParentString,str(eventsPerJobRequested),str(jobSkipEventCount)])
481                        else:
482                            list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
483                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
484                        self.jobDestination.append(blockSites[block])
485                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
486                        jobsOfBlock[block].append(jobCount+1)
487                        # reset counter
488                        jobCount = jobCount + 1
489                        totalEventCount = totalEventCount + eventsPerJobRequested
490                        eventsRemaining = eventsRemaining - eventsPerJobRequested
491                        jobSkipEventCount = 0
492                        # reset file
493                        pString = ""
494                        parString = ""
495                        filesEventCount = 0
496                        newFile = 1
497                        fileCount += 1
498
499                    # if more events in file remain than eventsPerJobRequested
500                    else :
501                        # close job but don't touch new file
502                        fullString = parString[:-2]
503                        if self.useParent:
504                            fullParentString = pString[:-2]
505                            list_of_lists.append([fullString,fullParentString,str(eventsPerJobRequested),str(jobSkipEventCount)])
506                        else:
507                            list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
508                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
509                        self.jobDestination.append(blockSites[block])
510                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
511                        jobsOfBlock[block].append(jobCount+1)
512                        # increase counter
513                        jobCount = jobCount + 1
514                        totalEventCount = totalEventCount + eventsPerJobRequested
515                        eventsRemaining = eventsRemaining - eventsPerJobRequested
516                        # calculate skip events for last file
517                        # use filesEventCount (contains several files), jobSkipEventCount and eventsPerJobRequest
518                        jobSkipEventCount = eventsPerJobRequested - (filesEventCount - jobSkipEventCount - self.eventsbyfile[file])
519                        # remove all but the last file
520                        filesEventCount = self.eventsbyfile[file]
521                        if self.useParent:
522                            for f in parent : pString += '\\\"' + f + '\\\"\,'
523                        parString = '\\\"' + file + '\\\"\,'
524                    pass # END if
525                pass # END while (iterate over files in the block)
526        pass # END while (iterate over blocks in the dataset)
527        self.ncjobs = self.total_number_of_jobs = jobCount
528        if (eventsRemaining > 0 and jobCount < totalNumberOfJobs ):
529            common.logger.message("Could not run on all requested events because some blocks not hosted at allowed sites.")
530        common.logger.message(str(jobCount)+" job(s) can run on "+str(totalEventCount)+" events.\n")
431  
432          # screen output
433 <        screenOutput = "List of jobs and available destination sites:\n\n"
434 <
435 <        # keep trace of block with no sites to print a warning at the end
436 <        noSiteBlock = []
437 <        bloskNoSite = []
438 <
439 <        blockCounter = 0
540 <        for block in blocks:
541 <            if block in jobsOfBlock.keys() :
542 <                blockCounter += 1
543 <                screenOutput += "Block %5i: jobs %20s: sites: %s\n" % (blockCounter,spanRanges(jobsOfBlock[block]),
544 <                    ','.join(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)))
545 <                if len(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)) == 0:
546 <                    noSiteBlock.append( spanRanges(jobsOfBlock[block]) )
547 <                    bloskNoSite.append( blockCounter )
548 <
549 <        common.logger.message(screenOutput)
550 <        if len(noSiteBlock) > 0 and len(bloskNoSite) > 0:
551 <            msg = 'WARNING: No sites are hosting any part of data for block:\n                '
552 <            virgola = ""
553 <            if len(bloskNoSite) > 1:
554 <                virgola = ","
555 <            for block in bloskNoSite:
556 <                msg += ' ' + str(block) + virgola
557 <            msg += '\n               Related jobs:\n                 '
558 <            virgola = ""
559 <            if len(noSiteBlock) > 1:
560 <                virgola = ","
561 <            for range_jobs in noSiteBlock:
562 <                msg += str(range_jobs) + virgola
563 <            msg += '\n               will not be submitted and this block of data can not be analyzed!\n'
564 <            if self.cfg_params.has_key('EDG.se_white_list'):
565 <                msg += 'WARNING: SE White List: '+self.cfg_params['EDG.se_white_list']+'\n'
566 <                msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
567 <                msg += 'Please check if the dataset is available at this site!)\n'
568 <            if self.cfg_params.has_key('EDG.ce_white_list'):
569 <                msg += 'WARNING: CE White List: '+self.cfg_params['EDG.ce_white_list']+'\n'
570 <                msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
571 <                msg += 'Please check if the dataset is available at this site!)\n'
572 <
573 <            common.logger.message(msg)
574 <
575 <        self.list_of_args = list_of_lists
576 <        return
577 <
578 <    def jobSplittingNoInput(self):
579 <        """
580 <        Perform job splitting based on number of event per job
581 <        """
582 <        common.logger.debug(5,'Splitting per events')
583 <
584 <        if (self.selectEventsPerJob):
585 <            common.logger.message('Required '+str(self.eventsPerJob)+' events per job ')
586 <        if (self.selectNumberOfJobs):
587 <            common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
588 <        if (self.selectTotalNumberEvents):
589 <            common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
590 <
591 <        if (self.total_number_of_events < 0):
592 <            msg='Cannot split jobs per Events with "-1" as total number of events'
593 <            raise CrabException(msg)
594 <
595 <        if (self.selectEventsPerJob):
596 <            if (self.selectTotalNumberEvents):
597 <                self.total_number_of_jobs = int(self.total_number_of_events/self.eventsPerJob)
598 <            elif(self.selectNumberOfJobs) :
599 <                self.total_number_of_jobs =self.theNumberOfJobs
600 <                self.total_number_of_events =int(self.theNumberOfJobs*self.eventsPerJob)
601 <
602 <        elif (self.selectNumberOfJobs) :
603 <            self.total_number_of_jobs = self.theNumberOfJobs
604 <            self.eventsPerJob = int(self.total_number_of_events/self.total_number_of_jobs)
605 <
606 <        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
607 <
608 <        # is there any remainder?
609 <        check = int(self.total_number_of_events) - (int(self.total_number_of_jobs)*self.eventsPerJob)
610 <
611 <        common.logger.debug(5,'Check  '+str(check))
612 <
613 <        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created, each for '+str(self.eventsPerJob)+' for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
614 <        if check > 0:
615 <            common.logger.message('Warning: asked '+str(self.total_number_of_events)+' but can do only '+str(int(self.total_number_of_jobs)*self.eventsPerJob))
616 <
617 <        # argument is seed number.$i
618 <        self.list_of_args = []
619 <        for i in range(self.total_number_of_jobs):
620 <            ## Since there is no input, any site is good
621 <            self.jobDestination.append([""]) #must be empty to write correctly the xml
622 <            args=[]
623 <            if (self.firstRun):
624 <                ## pythia first run
625 <                args.append(str(self.firstRun)+str(i))
626 <            self.list_of_args.append(args)
627 <
628 <        return
629 <
630 <
631 <    def jobSplittingForScript(self):
632 <        """
633 <        Perform job splitting based on number of job
634 <        """
635 <        common.logger.debug(5,'Splitting per job')
636 <        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
637 <
638 <        self.total_number_of_jobs = self.theNumberOfJobs
639 <
640 <        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
433 >        if self.ads or self.lumiMask:
434 >            common.logger.info("Requested (A)DS %s has %s block(s)." %
435 >                               (datasetPath, len(self.filesbyblock.keys())))
436 >        else:
437 >            common.logger.info("Requested dataset: " + datasetPath + \
438 >                " has " + str(self.maxEvents) + " events in " + \
439 >                str(len(self.filesbyblock.keys())) + " blocks.\n")
440  
441 <        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created')
441 >        return sites
442  
644        # argument is seed number.$i
645        self.list_of_args = []
646        for i in range(self.total_number_of_jobs):
647            self.jobDestination.append([""])
648            self.list_of_args.append([str(i)])
649        return
443  
    def split(self, jobParams,firstJobID):
        """
        Materialize the splitting computed earlier (stored in self.dict):
        fill the per-job arguments and destinations, persist them in the
        task DB and, when any argument dictionaries were built, in the
        arguments XML file (via CreateXML/addEntry).

        NOTE(review): the jobParams parameter is immediately overwritten
        by self.dict['args'], so the value passed in is effectively
        ignored -- confirm with callers before relying on it.
        """
        jobParams = self.dict['args']
        njobs = self.dict['njobs']
        self.jobDestination = self.dict['jobDestination']

        if njobs == 0:
            raise CrabException("Asked to split zero jobs: aborting")
        # Direct (non-server, non-local) submission is capped at 500 jobs;
        # on violation the working dir is removed and the task aborted.
        if not self.server and not self.local and njobs > 500:
            ######### FEDE FOR BUG 73010 ##########
            msg = "The CRAB client will not submit more than 500 jobs.\n"
            msg += "      Use the server mode or submit your jobs in smaller groups"
            add = '\n'
            import shutil
            try:
                add += '      --->> Removing the working_dir ' +  common.work_space._top_dir + ' \n'
                shutil.rmtree(common.work_space._top_dir)
            except OSError:
                add += '      Warning: problems removing the working_dir ' + common.work_space._top_dir + ' \n'
                add += '      Please remove it by hand.'
            msg += add
            raise CrabException(msg)
            #######################################
        # create the empty structure
        for i in range(njobs):
            jobParams.append("")

        listID=[]
        listField=[]
        listDictions=[]
        exist= os.path.exists(self.argsFile)
        for id in range(njobs):
            job = id + int(firstJobID)
            listID.append(job+1)
            job_ToSave ={}
            concString = ' '
            argu=''
            str_argu = str(job+1)
            if len(jobParams[id]):
                # map the positional splitting args onto their parameter
                # names (self.dict['params']) for the XML entry
                argu = {'JobID': job+1}
                for i in range(len(jobParams[id])):
                    argu[self.dict['params'][i]]=jobParams[id][i]
                    if len(jobParams[id])==1: self.NumEvents = jobParams[id][i]
                # just for debug
                str_argu += concString.join(jobParams[id])
            if argu != '': listDictions.append(argu)
            job_ToSave['arguments']= '%d %d'%( (job+1), 0)
            job_ToSave['dlsDestination']= self.jobDestination[id]
            listField.append(job_ToSave)
            # NOTE(review): import inside the loop -- harmless (cached after
            # the first iteration) but could be hoisted
            from ProdCommon.SiteDB.CmsSiteMapper import CmsSEMap
            cms_se = CmsSEMap()
            msg="Job  %s  Arguments:  %s\n"%(str(job+1),str_argu)
            msg+="\t  Destination: %s "%(str(self.jobDestination[id]))
            SEDestination = [cms_se[dest] for dest in self.jobDestination[id]]
            msg+="\t  CMSDestination: %s "%(str(SEDestination))
            common.logger.log(10-1,msg)
        # write xml
        if len(listDictions):
            if exist==False: self.CreateXML()
            self.addEntry(listDictions)
        common._db.updateJob_(listID,listField)
        return
506  
507 +    def CreateXML(self):
508 +        """
509 +        """
510 +        result = IMProvNode( self.rootArgsFilename )
511 +        outfile = file( self.argsFile, 'w').write(str(result))
512 +        return
513 +
514 +    def addEntry(self, listDictions):
515 +        """
516 +        _addEntry_
517 +
518 +        add an entry to the xml file
519 +        """
520 +        ## load xml
521 +        improvDoc = loadIMProvFile(self.argsFile)
522 +        entrname= 'Job'
523 +        for dictions in listDictions:
524 +           report = IMProvNode(entrname , None, **dictions)
525 +           improvDoc.addNode(report)
526 +        outfile = file( self.argsFile, 'w').write(str(improvDoc))
527          return
528  
529      def numberOfJobs(self):
530 <        return self.total_number_of_jobs
530 > #wmbs
531 >        if self.automation==0:
532 >           return self.dict['njobs']
533 >        else:
534 >           return None
535  
536      def getTarBall(self, exe):
537          """
538          Return the TarBall with lib and exe
539          """
540 <        self.tgzNameWithPath = common.work_space.pathForTgz()+'share/'+self.tgz_name
540 >        self.tgzNameWithPath = common.work_space.pathForTgz()+self.tgz_name
541          if os.path.exists(self.tgzNameWithPath):
542              return self.tgzNameWithPath
543  
# Line 702 | Line 554 | class Cmssw(JobType):
554  
555          ## check if working area is release top
556          if swReleaseTop == '' or swArea == swReleaseTop:
557 <            common.logger.debug(3,"swArea = "+swArea+" swReleaseTop ="+swReleaseTop)
557 >            common.logger.debug("swArea = "+swArea+" swReleaseTop ="+swReleaseTop)
558              return
559  
560          import tarfile
# Line 717 | Line 569 | class Cmssw(JobType):
569                  ## then check if it's private or not
570                  if exeWithPath.find(swReleaseTop) == -1:
571                      # the exe is private, so we must ship
572 <                    common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
572 >                    common.logger.debug("Exe "+exeWithPath+" to be tarred")
573                      path = swArea+'/'
574                      # distinguish case when script is in user project area or given by full path somewhere else
575                      if exeWithPath.find(path) >= 0 :
# Line 731 | Line 583 | class Cmssw(JobType):
583                      pass
584  
585              ## Now get the libraries: only those in local working area
586 +            tar.dereference=True
587              libDir = 'lib'
588              lib = swArea+'/' +libDir
589 <            common.logger.debug(5,"lib "+lib+" to be tarred")
589 >            common.logger.debug("lib "+lib+" to be tarred")
590              if os.path.exists(lib):
591                  tar.add(lib,libDir)
592  
# Line 742 | Line 595 | class Cmssw(JobType):
595              module = swArea + '/' + moduleDir
596              if os.path.isdir(module):
597                  tar.add(module,moduleDir)
598 +            tar.dereference=False
599  
600              ## Now check if any data dir(s) is present
601              self.dataExist = False
# Line 755 | Line 609 | class Cmssw(JobType):
609                      todo_list += [(entryPath + i, i) for i in  os.listdir(swArea+"/src/"+entry)]
610                      if name == 'data':
611                          self.dataExist=True
612 <                        common.logger.debug(5,"data "+entry+" to be tarred")
612 >                        common.logger.debug("data "+entry+" to be tarred")
613                          tar.add(swArea+"/src/"+entry,"src/"+entry)
614                      pass
615                  pass
# Line 763 | Line 617 | class Cmssw(JobType):
617              ### CMSSW ParameterSet
618              if not self.pset is None:
619                  cfg_file = common.work_space.jobDir()+self.configFilename()
620 +                pickleFile = common.work_space.jobDir()+self.configFilename() + '.pkl'
621                  tar.add(cfg_file,self.configFilename())
622 <                common.logger.debug(5,"File added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
622 >                tar.add(pickleFile,self.configFilename() + '.pkl')
623  
624 +            try:
625 +                crab_cfg_file = common.work_space.shareDir()+'/crab.cfg'
626 +                tar.add(crab_cfg_file,'crab.cfg')
627 +            except:
628 +                pass
629  
630              ## Add ProdCommon dir to tar
631              prodcommonDir = './'
632              prodcommonPath = os.environ['CRABDIR'] + '/' + 'external/'
633 <            neededStuff = ['ProdCommon/__init__.py','ProdCommon/FwkJobRep', 'ProdCommon/CMSConfigTools','ProdCommon/Core','ProdCommon/MCPayloads', 'IMProv']
633 >            neededStuff = ['ProdCommon/__init__.py','ProdCommon/FwkJobRep', 'ProdCommon/CMSConfigTools', \
634 >                           'ProdCommon/Core', 'ProdCommon/MCPayloads', 'IMProv', 'ProdCommon/Storage', \
635 >                           'WMCore/__init__.py','WMCore/Algorithms']
636              for file in neededStuff:
637                  tar.add(prodcommonPath+file,prodcommonDir+file)
776            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
638  
639              ##### ML stuff
640              ML_file_list=['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py']
641              path=os.environ['CRABDIR'] + '/python/'
642              for file in ML_file_list:
643                  tar.add(path+file,file)
783            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
644  
645              ##### Utils
646 <            Utils_file_list=['parseCrabFjr.py','writeCfg.py', 'fillCrabFjr.py']
646 >            Utils_file_list=['parseCrabFjr.py','writeCfg.py', 'fillCrabFjr.py','cmscp.py']
647              for file in Utils_file_list:
648                  tar.add(path+file,file)
789            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
649  
650              ##### AdditionalFiles
651 +            tar.dereference=True
652              for file in self.additional_inbox_files:
653                  tar.add(file,string.split(file,'/')[-1])
654 <            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
654 >            tar.dereference=False
655 >            common.logger.log(10-1,"Files in "+self.tgzNameWithPath+" : "+str(tar.getnames()))
656  
657              tar.close()
658 <        except IOError:
659 <            raise CrabException('Could not create tar-ball '+self.tgzNameWithPath)
660 <        except tarfile.TarError:
661 <            raise CrabException('Could not create tar-ball '+self.tgzNameWithPath)
658 >        except IOError, exc:
659 >            msg = 'Could not create tar-ball %s \n'%self.tgzNameWithPath
660 >            msg += str(exc)
661 >            raise CrabException(msg)
662 >        except tarfile.TarError, exc:
663 >            msg = 'Could not create tar-ball %s \n'%self.tgzNameWithPath
664 >            msg += str(exc)
665 >            raise CrabException(msg)
666  
802        ## check for tarball size
667          tarballinfo = os.stat(self.tgzNameWithPath)
668          if ( tarballinfo.st_size > self.MaxTarBallSize*1024*1024 ) :
669 <            raise CrabException('Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) + ' MB input sandbox limit and not supported by the used GRID submission system. Please make sure that no unnecessary files are in all data directories in your local CMSSW project area as they are automatically packed into the input sandbox.')
669 >            if not self.server:
670 >                msg  = 'Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + \
671 >                         str(self.MaxTarBallSize) +'MB input sandbox limit \n'
672 >                msg += '      and not supported by the direct GRID submission system.\n'
673 >                msg += '      Please use the CRAB server mode by setting server_name=<NAME> in section [CRAB] of your crab.cfg.\n'
674 >                msg += '      For further infos please see https://twiki.cern.ch/twiki/bin/view/CMS/SWGuideCrabServerForUsers#Server_available_for_users'
675 >            else:
676 >                msg  = 'Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' +  \
677 >                        str(self.MaxTarBallSize) +'MB input sandbox limit in the server.'
678 >            raise CrabException(msg)
679  
680          ## create tar-ball with ML stuff
681  
# Line 811 | Line 684 | class Cmssw(JobType):
684          Returns part of a job script which prepares
685          the execution environment for the job 'nj'.
686          """
687 <        if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
688 <            psetName = 'pset.py'
816 <        else:
817 <            psetName = 'pset.cfg'
687 >        psetName = 'pset.py'
688 >
689          # Prepare JobType-independent part
690          txt = '\n#Written by cms_cmssw::wsSetupEnvironment\n'
691          txt += 'echo ">>> setup environment"\n'
692 <        txt += 'if [ $middleware == LCG ]; then \n'
692 >        txt += 'echo "set SCRAM ARCH to ' + self.executable_arch + '"\n'
693 >        txt += 'export SCRAM_ARCH=' + self.executable_arch + '\n'
694 >        txt += 'echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
695 >        txt += 'if [ $middleware == LCG ] || [ $middleware == CAF ] || [ $middleware == LSF ]; then \n'
696          txt += self.wsSetupCMSLCGEnvironment_()
697          txt += 'elif [ $middleware == OSG ]; then\n'
698          txt += '    WORKING_DIR=`/bin/mktemp  -d $OSG_WN_TMP/cms_XXXXXXXXXXXX`\n'
# Line 833 | Line 707 | class Cmssw(JobType):
707          txt += '    cd $WORKING_DIR\n'
708          txt += '    echo ">>> current directory (WORKING_DIR): $WORKING_DIR"\n'
709          txt += self.wsSetupCMSOSGEnvironment_()
710 +        #Setup SGE Environment
711 +        txt += 'elif [ $middleware == SGE ]; then\n'
712 +        txt += self.wsSetupCMSLCGEnvironment_()
713 +
714 +        txt += 'elif [ $middleware == ARC ]; then\n'
715 +        txt += self.wsSetupCMSLCGEnvironment_()
716 +
717 +        #Setup PBS Environment
718 +        txt += 'elif [ $middleware == PBS ]; then\n'
719 +        txt += self.wsSetupCMSLCGEnvironment_()
720 +
721          txt += 'fi\n'
722  
723          # Prepare JobType-specific part
# Line 848 | Line 733 | class Cmssw(JobType):
733          txt += '    func_exit\n'
734          txt += 'fi \n'
735          txt += 'cd '+self.version+'\n'
736 <        txt += 'SOFTWARE_DIR=`pwd`\n'
736 >        txt += 'SOFTWARE_DIR=`pwd`; export SOFTWARE_DIR\n'
737          txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
738          txt += 'eval `'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME`\n'
739          txt += 'if [ $? != 0 ] ; then\n'
# Line 859 | Line 744 | class Cmssw(JobType):
744          txt += 'fi \n'
745          # Handle the arguments:
746          txt += "\n"
747 <        txt += "## number of arguments (first argument always jobnumber)\n"
747 >        txt += "## number of arguments (first argument always jobnumber, the second is the resubmission number)\n"
748          txt += "\n"
749          txt += "if [ $nargs -lt "+str(self.argsList)+" ]\n"
750          txt += "then\n"
# Line 875 | Line 760 | class Cmssw(JobType):
760              txt += '\n'
761              txt += 'DatasetPath='+self.datasetPath+'\n'
762  
763 <            datasetpath_split = self.datasetPath.split("/")
764 <            ### FEDE FOR NEW LFN ###
880 <            self.primaryDataset = datasetpath_split[1]
881 <            ########################
882 <            txt += 'PrimaryDataset='+datasetpath_split[1]+'\n'
883 <            txt += 'DataTier='+datasetpath_split[2]+'\n'
763 >            txt += 'PrimaryDataset='+self.primaryDataset +'\n'
764 >            txt += 'DataTier='+self.dataTier+'\n'
765              txt += 'ApplicationFamily=cmsRun\n'
766  
767          else:
768              txt += 'DatasetPath=MCDataTier\n'
888            ### FEDE FOR NEW LFN ###
889            self.primaryDataset = 'null'
890            ########################
769              txt += 'PrimaryDataset=null\n'
770              txt += 'DataTier=null\n'
771              txt += 'ApplicationFamily=MCDataTier\n'
772          if self.pset != None:
773              pset = os.path.basename(job.configFilename())
774 +            pkl  = os.path.basename(job.configFilename()) + '.pkl'
775              txt += '\n'
776              txt += 'cp  $RUNTIME_AREA/'+pset+' .\n'
777 <            if (self.datasetPath): # standard job
899 <                txt += 'InputFiles=${args[1]}; export InputFiles\n'
900 <                if (self.useParent):
901 <                    txt += 'ParentFiles=${args[2]}; export ParentFiles\n'
902 <                    txt += 'MaxEvents=${args[3]}; export MaxEvents\n'
903 <                    txt += 'SkipEvents=${args[4]}; export SkipEvents\n'
904 <                else:
905 <                    txt += 'MaxEvents=${args[2]}; export MaxEvents\n'
906 <                    txt += 'SkipEvents=${args[3]}; export SkipEvents\n'
907 <                txt += 'echo "Inputfiles:<$InputFiles>"\n'
908 <                if (self.useParent): txt += 'echo "ParentFiles:<$ParentFiles>"\n'
909 <                txt += 'echo "MaxEvents:<$MaxEvents>"\n'
910 <                txt += 'echo "SkipEvents:<$SkipEvents>"\n'
911 <            else:  # pythia like job
912 <                txt += 'PreserveSeeds='  + ','.join(self.preserveSeeds)  + '; export PreserveSeeds\n'
913 <                txt += 'IncrementSeeds=' + ','.join(self.incrementSeeds) + '; export IncrementSeeds\n'
914 <                txt += 'echo "PreserveSeeds: <$PreserveSeeds>"\n'
915 <                txt += 'echo "IncrementSeeds:<$IncrementSeeds>"\n'
916 <                if (self.firstRun):
917 <                    txt += 'FirstRun=${args[1]}; export FirstRun\n'
918 <                    txt += 'echo "FirstRun: <$FirstRun>"\n'
919 <
920 <            txt += 'mv -f ' + pset + ' ' + psetName + '\n'
777 >            txt += 'cp  $RUNTIME_AREA/'+pkl+' .\n'
778  
779 +            txt += 'PreserveSeeds='  + ','.join(self.preserveSeeds)  + '; export PreserveSeeds\n'
780 +            txt += 'IncrementSeeds=' + ','.join(self.incrementSeeds) + '; export IncrementSeeds\n'
781 +            txt += 'echo "PreserveSeeds: <$PreserveSeeds>"\n'
782 +            txt += 'echo "IncrementSeeds:<$IncrementSeeds>"\n'
783  
784 <        if self.pset != None:
785 <            # FUTURE: Can simply for 2_1_x and higher
786 <            txt += '\n'
787 <            if self.debug_wrapper==True:
788 <                txt += 'echo "***** cat ' + psetName + ' *********"\n'
789 <                txt += 'cat ' + psetName + '\n'
929 <                txt += 'echo "****** end ' + psetName + ' ********"\n'
930 <                txt += '\n'
931 <            if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
932 <                txt += 'PSETHASH=`edmConfigHash ' + psetName + '` \n'
933 <            else:
934 <                txt += 'PSETHASH=`edmConfigHash < ' + psetName + '` \n'
935 <            txt += 'echo "PSETHASH = $PSETHASH" \n'
784 >            txt += 'mv -f ' + pset + ' ' + psetName + '\n'
785 >            if self.var_filter:
786 >                #print "self.var_filter = ",self.var_filter
787 >                txt += "export var_filter="+"'"+self.var_filter+"'\n"
788 >                txt += 'echo $var_filter'
789 >        else:
790              txt += '\n'
791 +            if self.AdditionalArgs: txt += 'export AdditionalArgs=\"%s\"\n'%(self.AdditionalArgs)
792 +            if int(self.NumEvents) != 0: txt += 'export MaxEvents=%s\n'%str(self.NumEvents)
793          return txt
794  
795      def wsUntarSoftware(self, nj=0):
# Line 945 | Line 801 | class Cmssw(JobType):
801          txt = '\n#Written by cms_cmssw::wsUntarSoftware\n'
802  
803          if os.path.isfile(self.tgzNameWithPath):
804 <            txt += 'echo ">>> tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+' :" \n'
805 <            txt += 'tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
950 <            if  self.debug_wrapper:
951 <                txt += 'ls -Al \n'
804 >            txt += 'echo ">>> tar xzf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+' :" \n'
805 >            txt += 'tar zxvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
806              txt += 'untar_status=$? \n'
807 +            #### FEDE FOR BUG 78585 ##########
808 +            if  self.debug_wrapper==1 :
809 +                txt += 'echo "----------------" \n'
810 +                txt += 'ls -AlR $RUNTIME_AREA \n'
811 +                txt += 'echo "----------------" \n'
812              txt += 'if [ $untar_status -ne 0 ]; then \n'
813              txt += '   echo "ERROR ==> Untarring .tgz file failed"\n'
814              txt += '   job_exit_code=$untar_status\n'
815              txt += '   func_exit\n'
816              txt += 'else \n'
817              txt += '   echo "Successful untar" \n'
818 +            txt += '   chmod a+w -R $RUNTIME_AREA \n'
819 +            if  self.debug_wrapper==1 :
820 +                txt += '   echo "changed in a+w the permission of $RUNTIME_AREA "\n'
821 +                txt += '   ls -AlR $RUNTIME_AREA \n'
822              txt += 'fi \n'
823 +            ###########################
824              txt += '\n'
825              txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
826              txt += 'if [ -z "$PYTHONPATH" ]; then\n'
# Line 979 | Line 843 | class Cmssw(JobType):
843  
844          txt = '\n#Written by cms_cmssw::wsBuildExe\n'
845          txt += 'echo ">>> moving CMSSW software directories in `pwd`" \n'
846 <
847 <        txt += 'rm -r lib/ module/ \n'
846 >        
847 >        ############ FEDE FOR BUG 78585 #####################
848 >        txt += 'rm -rf lib/ module/ \n'
849 >        #######################################
850          txt += 'mv $RUNTIME_AREA/lib/ . \n'
851          txt += 'mv $RUNTIME_AREA/module/ . \n'
852          if self.dataExist == True:
853 <            txt += 'rm -r src/ \n'
853 >            ############ FEDE FOR BUG 78585 #####################
854 >            txt += 'rm -rf src/ \n'
855 >            ######################################
856              txt += 'mv $RUNTIME_AREA/src/ . \n'
857          if len(self.additional_inbox_files)>0:
858              for file in self.additional_inbox_files:
859                  txt += 'mv $RUNTIME_AREA/'+os.path.basename(file)+' . \n'
992        # txt += 'mv $RUNTIME_AREA/ProdCommon/ . \n'
993        # txt += 'mv $RUNTIME_AREA/IMProv/ . \n'
860  
861          txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
862          txt += 'if [ -z "$PYTHONPATH" ]; then\n'
# Line 1001 | Line 867 | class Cmssw(JobType):
867          txt += 'fi\n'
868          txt += '\n'
869  
870 +        if self.pset != None:
871 +            psetName = 'pset.py'
872 +
873 +            txt += '\n'
874 +            if self.debug_wrapper == 1:
875 +                txt += 'echo "***** cat ' + psetName + ' *********"\n'
876 +                txt += 'cat ' + psetName + '\n'
877 +                txt += 'echo "****** end ' + psetName + ' ********"\n'
878 +                txt += '\n'
879 +                txt += 'echo "***********************" \n'
880 +                txt += 'which edmConfigHash \n'
881 +                txt += 'echo "***********************" \n'
882 +            txt += 'edmConfigHash ' + psetName + ' \n'
883 +            txt += 'PSETHASH=`edmConfigHash ' + psetName + '` \n'
884 +            txt += 'echo "PSETHASH = $PSETHASH" \n'
885 +            #### temporary fix for noEdm files #####
886 +            txt += 'if [ -z "$PSETHASH" ]; then \n'
887 +            txt += '   export PSETHASH=null\n'
888 +            txt += 'fi \n'
889 +            #############################################
890 +            txt += '\n'
891          return txt
892  
893  
# Line 1011 | Line 898 | class Cmssw(JobType):
898              return self.executable
899  
900      def executableArgs(self):
901 <        # FUTURE: This function tests the CMSSW version. Can be simplified as we drop support for old versions
902 <        if self.scriptExe:#CarlosDaniele
1016 <            return   self.scriptExe + " $NJob"
901 >        if self.scriptExe:
902 >            return os.path.basename(self.scriptExe) + " $NJob $AdditionalArgs"
903          else:
904 <            ex_args = ""
1019 <            # FUTURE: This tests the CMSSW version. Can remove code as versions deprecated
1020 <            # Framework job report
1021 <            if (self.CMSSW_major >= 1 and self.CMSSW_minor >= 5) or (self.CMSSW_major >= 2):
1022 <                ex_args += " -j $RUNTIME_AREA/crab_fjr_$NJob.xml"
1023 <            # Type of config file
1024 <            if self.CMSSW_major >= 2 :
1025 <                ex_args += " -p pset.py"
1026 <            else:
1027 <                ex_args += " -p pset.cfg"
1028 <            return ex_args
904 >            return " -j $RUNTIME_AREA/crab_fjr_$NJob.xml -p pset.py"
905  
906      def inputSandbox(self, nj):
907          """
# Line 1034 | Line 910 | class Cmssw(JobType):
910          inp_box = []
911          if os.path.isfile(self.tgzNameWithPath):
912              inp_box.append(self.tgzNameWithPath)
913 <        wrapper = os.path.basename(str(common._db.queryTask('scriptName')))
914 <        inp_box.append(common.work_space.pathForTgz() +'job/'+ wrapper)
913 >        if os.path.isfile(self.argsFile):
914 >            inp_box.append(self.argsFile)
915 >        inp_box.append(common.work_space.jobDir() + self.scriptName)
916          return inp_box
917  
918      def outputSandbox(self, nj):
# Line 1059 | Line 936 | class Cmssw(JobType):
936          txt = '\n#Written by cms_cmssw::wsRenameOutput\n'
937          txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
938          txt += 'echo ">>> current directory content:"\n'
939 <        if self.debug_wrapper:
939 >        if self.debug_wrapper==1:
940              txt += 'ls -Al\n'
941          txt += '\n'
942  
943          for fileWithSuffix in (self.output_file):
944 <            output_file_num = numberFile(fileWithSuffix, '$NJob')
944 >            output_file_num = numberFile(fileWithSuffix, '$OutUniqueID')
945              txt += '\n'
946              txt += '# check output file\n'
947              txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n'
# Line 1085 | Line 962 | class Cmssw(JobType):
962              txt += 'fi\n'
963          file_list = []
964          for fileWithSuffix in (self.output_file):
965 <             file_list.append(numberFile(fileWithSuffix, '$NJob'))
965 >             file_list.append(numberFile('$SOFTWARE_DIR/'+fileWithSuffix, '$OutUniqueID'))
966  
967 <        txt += 'file_list="'+string.join(file_list,' ')+'"\n'
967 >        txt += 'file_list="'+string.join(file_list,',')+'"\n'
968          txt += '\n'
969          txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
970          txt += 'echo ">>> current directory content:"\n'
971 <        if self.debug_wrapper:
971 >        if self.debug_wrapper==1:
972              txt += 'ls -Al\n'
973          txt += '\n'
974          txt += 'cd $RUNTIME_AREA\n'
# Line 1113 | Line 990 | class Cmssw(JobType):
990                   '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
991  
992          req = req + ' && (other.GlueHostNetworkAdapterOutboundIP)'
993 <        if ( common.scheduler.name() == "glitecoll" ) or ( common.scheduler.name() == "glite"):
994 <            req += ' && other.GlueCEStateStatus == "Production" '
993 >        if ( common.scheduler.name() in ["glite"] ):
994 >            ## 25-Jun-2009 SL: patch to use Cream enabled WMS
995 >            if ( self.cfg_params.get('GRID.use_cream',None) ):
996 >                req += ' && (other.GlueCEStateStatus == "Production" || other.GlueCEStateStatus == "Special")'
997 >            else:
998 >                req += ' && other.GlueCEStateStatus == "Production" '
999  
1000          return req
1001  
1002      def configFilename(self):
1003          """ return the config filename """
1004 <        # FUTURE: Can remove cfg mode for CMSSW >= 2_1_x
1124 <        if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
1125 <          return self.name()+'.py'
1126 <        else:
1127 <          return self.name()+'.cfg'
1004 >        return self.name()+'.py'
1005  
1006      def wsSetupCMSOSGEnvironment_(self):
1007          """
# Line 1184 | Line 1061 | class Cmssw(JobType):
1061          txt += '    echo "==> setup cms environment ok"\n'
1062          return txt
1063  
1064 <    def modifyReport(self, nj):
1064 >    def wsModifyReport(self, nj):
1065          """
1066          insert the part of the script that modifies the FrameworkJob Report
1067          """
1068 <        txt = '\n#Written by cms_cmssw::modifyReport\n'
1069 <        publish_data = int(self.cfg_params.get('USER.publish_data',0))
1070 <        if (publish_data == 1):
1071 <            processedDataset = self.cfg_params['USER.publish_data_name']
1195 <            if (self.primaryDataset == 'null'):
1196 <                 self.primaryDataset = processedDataset
1197 <            if (common.scheduler.name().upper() == "CAF" or common.scheduler.name().upper() == "LSF"):
1198 <                ### FEDE FOR NEW LFN ###
1199 <                LFNBaseName = LFNBase(self.primaryDataset, processedDataset, LocalUser=True)
1200 <                self.user = getUserName(LocalUser=True)
1201 <                ########################
1202 <            else :
1203 <                ### FEDE FOR NEW LFN ###
1204 <                LFNBaseName = LFNBase(self.primaryDataset, processedDataset)
1205 <                self.user = getUserName()
1206 <                ########################
1207 <
1208 <            txt += 'if [ $copy_exit_status -eq 0 ]; then\n'
1209 <            ### FEDE FOR NEW LFN ###
1210 <            #txt += '    FOR_LFN=%s_${PSETHASH}/\n'%(LFNBaseName)
1211 <            txt += '    FOR_LFN=%s/${PSETHASH}/\n'%(LFNBaseName)
1212 <            ########################
1213 <            txt += 'else\n'
1214 <            txt += '    FOR_LFN=/copy_problems/ \n'
1215 <            txt += '    SE=""\n'
1216 <            txt += '    SE_PATH=""\n'
1217 <            txt += 'fi\n'
1068 >
1069 >        txt = ''
1070 >        if (self.copy_data == 1):
1071 >            txt = '\n#Written by cms_cmssw::wsModifyReport\n'
1072  
1073              txt += 'echo ">>> Modify Job Report:" \n'
1074              txt += 'chmod a+x $RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py\n'
1221            txt += 'ProcessedDataset='+processedDataset+'\n'
1222            txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1223            txt += 'echo "SE = $SE"\n'
1224            txt += 'echo "SE_PATH = $SE_PATH"\n'
1225            txt += 'echo "FOR_LFN = $FOR_LFN" \n'
1075              txt += 'echo "CMSSW_VERSION = $CMSSW_VERSION"\n\n'
1076 <            ### FEDE FOR NEW LFN ###
1077 <            txt += 'echo "$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier ' + self.user + '-$ProcessedDataset-$PSETHASH $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
1078 <            txt += '$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier ' + self.user + '-$ProcessedDataset-$PSETHASH $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
1079 <            ########################
1076 >
1077 >            args = 'fjr $RUNTIME_AREA/crab_fjr_$NJob.xml json $RUNTIME_AREA/resultCopyFile n_job $OutUniqueID PrimaryDataset $PrimaryDataset  ApplicationFamily $ApplicationFamily ApplicationName $executable cmssw_version $CMSSW_VERSION psethash $PSETHASH'
1078 >
1079 >            if (self.publish_data == 1):
1080 >                txt += 'ProcessedDataset='+self.processedDataset+'\n'
1081 >                txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1082 >                args += ' UserProcessedDataset $USER-$ProcessedDataset-$PSETHASH'
1083 >
1084 >            txt += 'echo "$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py '+str(args)+'"\n'
1085 >            txt += '$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py '+str(args)+'\n'
1086              txt += 'modifyReport_result=$?\n'
1087              txt += 'if [ $modifyReport_result -ne 0 ]; then\n'
1088              txt += '    modifyReport_result=70500\n'
# Line 1248 | Line 1103 | class Cmssw(JobType):
1103          txt += 'if [ -s $RUNTIME_AREA/crab_fjr_$NJob.xml ]; then\n'
1104          txt += '    if [ -s $RUNTIME_AREA/parseCrabFjr.py ]; then\n'
1105          txt += '        cmd_out=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --dashboard $MonitorID,$MonitorJobID '+self.debugWrap+'`\n'
1106 <        if self.debug_wrapper :
1106 >        if self.debug_wrapper==1 :
1107              txt += '        echo "Result of parsing the FrameworkJobReport crab_fjr.xml: $cmd_out"\n'
1108 +        txt += '        cmd_out_1=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --popularity $MonitorID,$MonitorJobID,$RUNTIME_AREA/inputsReport.txt '+self.debugWrap+'`\n'
1109 + #        if self.debug_wrapper==1 :
1110 +        txt += '        echo "Result of parsing the FrameworkJobReport crab_fjr.xml: $cmd_out_1"\n'
1111          txt += '        executable_exit_status=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --exitcode`\n'
1112          txt += '        if [ $executable_exit_status -eq 50115 ];then\n'
1113          txt += '            echo ">>> crab_fjr.xml contents: "\n'
# Line 1264 | Line 1122 | class Cmssw(JobType):
1122          txt += '        echo "CRAB python script to parse CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1123          txt += '    fi\n'
1124            #### Patch to check input data reading for CMSSW16x Hopefully we-ll remove it asap
1125 <
1126 <        if (self.datasetPath and not (self.dataset_pu or self.useParent) :
1127 <          # VERIFY PROCESSED DATA
1270 <            txt += '    if [ $executable_exit_status -eq 0 ];then\n'
1271 <            txt += '      echo ">>> Verify list of processed files:"\n'
1272 <            txt += '      echo $InputFiles |tr -d \'\\\\\' |tr \',\' \'\\n\'|tr -d \'"\' > input-files.txt\n'
1273 <            txt += '      python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --lfn > processed-files.txt\n'
1274 <            txt += '      cat input-files.txt  | sort | uniq > tmp.txt\n'
1275 <            txt += '      mv tmp.txt input-files.txt\n'
1276 <            txt += '      echo "cat input-files.txt"\n'
1277 <            txt += '      echo "----------------------"\n'
1278 <            txt += '      cat input-files.txt\n'
1279 <            txt += '      cat processed-files.txt | sort | uniq > tmp.txt\n'
1280 <            txt += '      mv tmp.txt processed-files.txt\n'
1281 <            txt += '      echo "----------------------"\n'
1282 <            txt += '      echo "cat processed-files.txt"\n'
1283 <            txt += '      echo "----------------------"\n'
1284 <            txt += '      cat processed-files.txt\n'
1285 <            txt += '      echo "----------------------"\n'
1286 <            txt += '      diff -q input-files.txt processed-files.txt\n'
1287 <            txt += '      fileverify_status=$?\n'
1288 <            txt += '      if [ $fileverify_status -ne 0 ]; then\n'
1289 <            txt += '         executable_exit_status=30001\n'
1290 <            txt += '         echo "ERROR ==> not all input files processed"\n'
1291 <            txt += '         echo "      ==> list of processed files from crab_fjr.xml differs from list in pset.cfg"\n'
1292 <            txt += '         echo "      ==> diff input-files.txt processed-files.txt"\n'
1293 <            txt += '      fi\n'
1294 <            txt += '    fi\n'
1295 <            txt += '\n'
1125 >        txt += '    if [ $executable_exit_status -eq 0 ];then\n'
1126 >        txt += '        echo ">>> Executable succeded  $executable_exit_status"\n'
1127 >        txt += '    fi\n'
1128          txt += 'else\n'
1129          txt += '    echo "CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1130          txt += 'fi\n'
1131          txt += '\n'
1132 +        txt += 'if [ $executable_exit_status -ne 0 ];then\n'
1133 +        txt += '    echo ">>> Executable failed  $executable_exit_status"\n'
1134 +        txt += '    echo "ExeExitCode=$executable_exit_status" | tee -a $RUNTIME_AREA/$repo\n'
1135 +        txt += '    echo "EXECUTABLE_EXIT_STATUS = $executable_exit_status"\n'
1136 +        txt += '    job_exit_code=$executable_exit_status\n'
1137 +        txt += '    func_exit\n'
1138 +        txt += 'fi\n\n'
1139          txt += 'echo "ExeExitCode=$executable_exit_status" | tee -a $RUNTIME_AREA/$repo\n'
1140          txt += 'echo "EXECUTABLE_EXIT_STATUS = $executable_exit_status"\n'
1141          txt += 'job_exit_code=$executable_exit_status\n'
# Line 1309 | Line 1148 | class Cmssw(JobType):
1148      def getParams(self):
1149          return self._params
1150  
1151 <    def uniquelist(self, old):
1313 <        """
1314 <        remove duplicates from a list
1315 <        """
1316 <        nd={}
1317 <        for e in old:
1318 <            nd[e]=0
1319 <        return nd.keys()
1320 <
1321 <    def outList(self):
1151 >    def outList(self,list=False):
1152          """
1153          check the dimension of the output files
1154          """
# Line 1327 | Line 1157 | class Cmssw(JobType):
1157          listOutFiles = []
1158          stdout = 'CMSSW_$NJob.stdout'
1159          stderr = 'CMSSW_$NJob.stderr'
1160 +        if len(self.output_file) <= 0:
1161 +            msg ="WARNING: no output files name have been defined!!\n"
1162 +            msg+="\tno output files will be reported back/staged\n"
1163 +            common.logger.info(msg)
1164 +
1165          if (self.return_data == 1):
1166 <            for file in (self.output_file+self.output_file_sandbox):
1167 <                listOutFiles.append(numberFile(file, '$NJob'))
1168 <            listOutFiles.append(stdout)
1169 <            listOutFiles.append(stderr)
1170 <        else:
1171 <            for file in (self.output_file_sandbox):
1172 <                listOutFiles.append(numberFile(file, '$NJob'))
1338 <            listOutFiles.append(stdout)
1339 <            listOutFiles.append(stderr)
1166 >            for file in (self.output_file):
1167 >                listOutFiles.append(numberFile(file, '$OutUniqueID'))
1168 >        for file in (self.output_file_sandbox):
1169 >            listOutFiles.append(numberFile(file, '$NJob'))
1170 >        listOutFiles.append(stdout)
1171 >        listOutFiles.append(stderr)
1172 >
1173          txt += 'echo "output files: '+string.join(listOutFiles,' ')+'"\n'
1174          txt += 'filesToCheck="'+string.join(listOutFiles,' ')+'"\n'
1175          txt += 'export filesToCheck\n'
1176 +        taskinfo={}
1177 +        taskinfo['outfileBasename'] = self.output_file
1178 +        common._db.updateTask_(taskinfo)
1179 +
1180 +        if list : return self.output_file
1181          return txt
1182 +
1183 +    def checkCMSSWVersion(self, url = "https://cmstags.cern.ch/tc/", fileName = "ReleasesXML"):
1184 +        """
1185 +        compare current CMSSW release and arch with allowed releases
1186 +        """
1187 +
1188 +        downloader = Downloader(url)
1189 +        goodRelease = False
1190 +        tagCollectorUrl = url + fileName
1191 +
1192 +        try:
1193 +            result = downloader.config(fileName)
1194 +        except:
1195 +            common.logger.info("ERROR: Problem reading file of allowed CMSSW releases.")
1196 +
1197 +        try:
1198 +            events = pulldom.parseString(result)
1199 +
1200 +            arch     = None
1201 +            release  = None
1202 +            relState = None
1203 +            for (event, node) in events:
1204 +                if event == pulldom.START_ELEMENT:
1205 +                    if node.tagName == 'architecture':
1206 +                        arch = node.attributes.getNamedItem('name').nodeValue
1207 +                    if node.tagName == 'project':
1208 +                        relState = node.attributes.getNamedItem('state').nodeValue
1209 +                        if relState == 'Announced':
1210 +                            release = node.attributes.getNamedItem('label').nodeValue
1211 +                if self.executable_arch == arch and self.version == release:
1212 +                    goodRelease = True
1213 +                    return goodRelease
1214 +
1215 +            if not goodRelease:
1216 +                msg = "WARNING: %s on %s is not among supported releases listed at %s ." % \
1217 +                        (self.version, self.executable_arch, tagCollectorUrl)
1218 +                msg += "Submission may fail."
1219 +                common.logger.info(msg)
1220 +        except:
1221 +            common.logger.info("Problems parsing file of allowed CMSSW releases.")
1222 +
1223 +        return goodRelease
1224 +

Diff Legend

- Removed lines
+ Added lines
< Changed lines
> Changed lines