Comparing COMP/CRAB/python/cms_cmssw.py (file contents):
Revision 1.205 by slacapra, Thu Jun 5 16:34:04 2008 UTC vs.
Revision 1.314 by spiga, Thu Jun 18 17:23:02 2009 UTC

# Line 1 | Line 1
1   from JobType import JobType
2 from crab_logger import Logger
2   from crab_exceptions import *
3   from crab_util import *
5 from BlackWhiteListParser import BlackWhiteListParser
4   import common
5   import Scram
6 < from LFNBaseName import *
6 > from Splitter import JobSplitter
7  
8 + from IMProv.IMProvNode import IMProvNode
9   import os, string, glob
10  
11   class Cmssw(JobType):
12 <    def __init__(self, cfg_params, ncjobs):
12 >    def __init__(self, cfg_params, ncjobs,skip_blocks, isNew):
13          JobType.__init__(self, 'CMSSW')
14 <        common.logger.debug(3,'CMSSW::__init__')
15 <
16 <        self.argsList = []
14 >        common.logger.debug('CMSSW::__init__')
15 >        self.skip_blocks = skip_blocks
16 >        self.argsList = 1
17  
18          self._params = {}
19          self.cfg_params = cfg_params
21        # init BlackWhiteListParser
22        self.blackWhiteListParser = BlackWhiteListParser(cfg_params)
20  
21 <        self.MaxTarBallSize = float(self.cfg_params.get('EDG.maxtarballsize',9.5))
21 >        ### Temporary patch to automatically skip the ISB size check:
22 >        server=self.cfg_params.get('CRAB.server_name',None)
23 >        size = 9.5
24 >        if server or common.scheduler.name().upper() in ['LSF','CAF']: size = 99999
25 >        ### D.S.
26 >        self.MaxTarBallSize = float(self.cfg_params.get('GRID.maxtarballsize',size))
27  
28          # number of jobs requested to be created, limit obj splitting
29          self.ncjobs = ncjobs
30  
29        log = common.logger
30
31          self.scram = Scram.Scram(cfg_params)
32          self.additional_inbox_files = []
33          self.scriptExe = ''
34          self.executable = ''
35          self.executable_arch = self.scram.getArch()
36 <        self.tgz_name = 'default.tgz'
36 >        self.tgz_name = 'default.tar.gz'
37 >        self.tar_name = 'default.tar'
38          self.scriptName = 'CMSSW.sh'
39          self.pset = ''
40          self.datasetPath = ''
41  
42 +        self.tgzNameWithPath = common.work_space.pathForTgz()+self.tgz_name
43          # set FJR file name
44          self.fjrFileName = 'crab_fjr.xml'
45  
46          self.version = self.scram.getSWVersion()
47 +        common.logger.log(10-1,"CMSSW version is: "+str(self.version))
48 +
49          version_array = self.version.split('_')
50          self.CMSSW_major = 0
51          self.CMSSW_minor = 0
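
The new revision tightens the version handling: the string returned by scram is split on underscores and, in the next hunk, the major/minor numbers are compared against a 1_5_x floor. A minimal sketch of that check, assuming the usual "CMSSW_<major>_<minor>_<patch>" format (values illustrative, not CRAB code):

    version = "CMSSW_2_2_13"
    parts = version.split('_')                  # ['CMSSW', '2', '2', '13']
    major, minor = int(parts[1]), int(parts[2])
    if major < 1 or (major == 1 and minor < 5):
        raise RuntimeError("CRAB supports CMSSW >= 1_5_x only")
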
# Line 54 | Line 58 | class Cmssw(JobType):
58              msg = "Cannot parse CMSSW version string: " + self.version + " for major and minor release number!"
59              raise CrabException(msg)
60  
61 +        if self.CMSSW_major < 1 or (self.CMSSW_major == 1 and self.CMSSW_minor < 5):
62 +            msg = "CRAB supports CMSSW >= 1_5_x only. Use an older CRAB version."
63 +            raise CrabException(msg)
64 +            """
65 +            As CMSSW versions are dropped we can drop more code:
66 +            1.X dropped: drop support for running .cfg on WN
67 +            2.0 dropped: drop all support for cfg here and in writeCfg
68 +            2.0 dropped: Recheck the random number seed support
69 +            """
70 +
71          ### collect Data cards
72  
73 <        if not cfg_params.has_key('CMSSW.datasetpath'):
73 >
74 >        ### Temporary: added to remove input file control in the case of PU
75 >        self.dataset_pu = cfg_params.get('CMSSW.dataset_pu', None)
76 >
77 >        tmp =  cfg_params['CMSSW.datasetpath']
78 >        common.logger.log(10-1, "CMSSW::CMSSW(): datasetPath = "+tmp)
79 >
80 >        if tmp =='':
81              msg = "Error: datasetpath not defined "
82              raise CrabException(msg)
83 <        tmp =  cfg_params['CMSSW.datasetpath']
63 <        log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
64 <        if string.lower(tmp)=='none':
83 >        elif string.lower(tmp)=='none':
84              self.datasetPath = None
85              self.selectNoInput = 1
86          else:
# Line 69 | Line 88 | class Cmssw(JobType):
88              self.selectNoInput = 0
89  
90          self.dataTiers = []
91 <        self.debugWrap = ''
92 <        self.debug_wrapper = cfg_params.get('USER.debug_wrapper',False)
93 <        if self.debug_wrapper: self.debugWrap='--debug'
91 >
92 >        self.debugWrap=''
93 >        self.debug_wrapper = int(cfg_params.get('USER.debug_wrapper',0))
94 >        if self.debug_wrapper == 1: self.debugWrap='--debug'
95 >
96          ## now the application
97 +        self.managedGenerators = ['madgraph', 'comphep', 'lhe']
98 +        self.generator = cfg_params.get('CMSSW.generator','pythia').lower()
99          self.executable = cfg_params.get('CMSSW.executable','cmsRun')
100 <        log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
100 >        common.logger.log(10-1, "CMSSW::CMSSW(): executable = "+self.executable)
101  
102          if not cfg_params.has_key('CMSSW.pset'):
103              raise CrabException("PSet file missing. Cannot run cmsRun ")
104          self.pset = cfg_params['CMSSW.pset']
105 <        log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
105 >        common.logger.log(10-1, "Cmssw::Cmssw(): PSet file = "+self.pset)
106          if self.pset.lower() != 'none' :
107              if (not os.path.exists(self.pset)):
108                  raise CrabException("User defined PSet file "+self.pset+" does not exist")
# Line 94 | Line 117 | class Cmssw(JobType):
117          self.output_file_sandbox.append(self.fjrFileName)
118  
119          # other output files to be returned via sandbox or copied to SE
120 +        outfileflag = False
121          self.output_file = []
122          tmp = cfg_params.get('CMSSW.output_file',None)
123          if tmp :
124 <            tmpOutFiles = string.split(tmp,',')
125 <            log.debug(7, 'cmssw::cmssw(): output files '+str(tmpOutFiles))
126 <            for tmp in tmpOutFiles:
127 <                tmp=string.strip(tmp)
104 <                self.output_file.append(tmp)
105 <                pass
106 <        else:
107 <            log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
108 <        pass
124 >            self.output_file = [x.strip() for x in tmp.split(',')]
125 >            outfileflag = True #output found
126 >        #else:
127 >        #    log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
128  
129          # script_exe file as additional file in inputSandbox
130          self.scriptExe = cfg_params.get('USER.script_exe',None)
# Line 115 | Line 134 | class Cmssw(JobType):
134                  raise CrabException(msg)
135              self.additional_inbox_files.append(string.strip(self.scriptExe))
136  
137 +        self.AdditionalArgs = cfg_params.get('USER.script_arguments',None)
138 +        if self.AdditionalArgs : self.AdditionalArgs = string.replace(self.AdditionalArgs,',',' ')
139 +
140          if self.datasetPath == None and self.pset == None and self.scriptExe == '' :
141              msg ="Error. script_exe  not defined"
142              raise CrabException(msg)
143  
144 <        # use parent files...
145 <        self.useParent = self.cfg_params.get('CMSSW.use_parent',False)
144 >        # use parent files...
145 >        self.useParent = int(self.cfg_params.get('CMSSW.use_parent',0))
146  
147          ## additional input files
148          if cfg_params.has_key('USER.additional_input_files'):
# Line 143 | Line 165 | class Cmssw(JobType):
165                      self.additional_inbox_files.append(string.strip(file))
166                  pass
167              pass
168 <            common.logger.debug(5,"Additional input files: "+str(self.additional_inbox_files))
168 >            common.logger.debug("Additional input files: "+str(self.additional_inbox_files))
169          pass
170  
149        ## Events per job
150        if cfg_params.has_key('CMSSW.events_per_job'):
151            self.eventsPerJob =int( cfg_params['CMSSW.events_per_job'])
152            self.selectEventsPerJob = 1
153        else:
154            self.eventsPerJob = -1
155            self.selectEventsPerJob = 0
156
157        ## number of jobs
158        if cfg_params.has_key('CMSSW.number_of_jobs'):
159            self.theNumberOfJobs =int( cfg_params['CMSSW.number_of_jobs'])
160            self.selectNumberOfJobs = 1
161        else:
162            self.theNumberOfJobs = 0
163            self.selectNumberOfJobs = 0
164
165        if cfg_params.has_key('CMSSW.total_number_of_events'):
166            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
167            self.selectTotalNumberEvents = 1
168            if self.selectNumberOfJobs  == 1:
169                if (self.total_number_of_events != -1) and int(self.total_number_of_events) < int(self.theNumberOfJobs):
170                    msg = 'Must specify at least one event per job. total_number_of_events > number_of_jobs '
171                    raise CrabException(msg)
172        else:
173            self.total_number_of_events = 0
174            self.selectTotalNumberEvents = 0
175
176        if self.pset != None:
177             if ( (self.selectTotalNumberEvents + self.selectEventsPerJob + self.selectNumberOfJobs) != 2 ):
178                 msg = 'Must define exactly two of total_number_of_events, events_per_job, or number_of_jobs.'
179                 raise CrabException(msg)
180        else:
181             if (self.selectNumberOfJobs == 0):
182                 msg = 'Must specify  number_of_jobs.'
183                 raise CrabException(msg)
171  
172          ## New method of dealing with seeds
173          self.incrementSeeds = []
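
The long block removed above used to enforce the splitting bookkeeping (events_per_job, number_of_jobs, total_number_of_events) inside this class; in revision 1.314 that responsibility moves to the new JobSplitter. As a rough sketch of the constraint the old code applied, with standalone illustrative variables in place of the cfg_params lookups:

    # Exactly two of the three splitting knobs must be given when a pset is used.
    total_number_of_events = 10000      # illustrative values, normally from crab.cfg
    events_per_job = 500
    number_of_jobs = None

    chosen = sum(1 for v in (total_number_of_events, events_per_job, number_of_jobs)
                 if v is not None)
    if chosen != 2:
        raise ValueError("Must define exactly two of total_number_of_events, "
                         "events_per_job, or number_of_jobs.")
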
# Line 196 | Line 183 | class Cmssw(JobType):
183                  tmp.strip()
184                  self.incrementSeeds.append(tmp)
185  
199        ## Old method of dealing with seeds
200        ## FUTURE: This is for old CMSSW and old CRAB. Can throw exceptions after a couple of CRAB releases and then
201        ## remove
202        self.sourceSeed = cfg_params.get('CMSSW.pythia_seed',None)
203        if self.sourceSeed:
204            print "pythia_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
205            self.incrementSeeds.append('sourceSeed')
206            self.incrementSeeds.append('theSource')
207
208        self.sourceSeedVtx = cfg_params.get('CMSSW.vtx_seed',None)
209        if self.sourceSeedVtx:
210            print "vtx_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
211            self.incrementSeeds.append('VtxSmeared')
212
213        self.sourceSeedG4 = cfg_params.get('CMSSW.g4_seed',None)
214        if self.sourceSeedG4:
215            print "g4_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
216            self.incrementSeeds.append('g4SimHits')
217
218        self.sourceSeedMix = cfg_params.get('CMSSW.mix_seed',None)
219        if self.sourceSeedMix:
220            print "mix_seed is a deprecated parameter. Use preserve_seeds or increment_seeds in the future.\n","Added to increment_seeds."
221            self.incrementSeeds.append('mix')
222
186          self.firstRun = cfg_params.get('CMSSW.first_run',None)
187  
225        if self.pset != None: #CarlosDaniele
226            import PsetManipulator as pp
227            PsetEdit = pp.PsetManipulator(self.pset) #Daniele Pset
228
188          # Copy/return
230
189          self.copy_data = int(cfg_params.get('USER.copy_data',0))
190          self.return_data = int(cfg_params.get('USER.return_data',0))
191  
192 +        self.conf = {}
193 +        self.conf['pubdata'] = None
194 +        # number of jobs requested to be created, limit obj splitting DD
195          #DBSDLS-start
196          ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
197          self.maxEvents=0  # max events available   ( --> check the requested nb. of evts in Creator.py)
# Line 242 | Line 203 | class Cmssw(JobType):
203          if self.datasetPath:
204              blockSites = self.DataDiscoveryAndLocation(cfg_params)
205          #DBSDLS-end
206 <
206 >        self.conf['blockSites']=blockSites
207  
208          ## Select Splitting
209 +        splitByRun = int(cfg_params.get('CMSSW.split_by_run',0))
210 +
211          if self.selectNoInput:
212              if self.pset == None:
213 <                self.jobSplittingForScript()
213 >                self.algo = 'ForScript'
214              else:
215 <                self.jobSplittingNoInput()
215 >                self.algo = 'NoInput'
216 >                self.conf['managedGenerators']=self.managedGenerators
217 >                self.conf['generator']=self.generator
218 >        elif splitByRun ==1:
219 >            self.algo = 'RunBased'
220          else:
221 <            self.jobSplittingByBlocks(blockSites)
221 >            self.algo = 'EventBased'
222 >
223 > #        self.algo = 'LumiBased'
224 >        splitter = JobSplitter(self.cfg_params,self.conf)
225 >        self.dict = splitter.Algos()[self.algo]()
226 >
227 >        self.argsFile= '%s/arguments.xml'%common.work_space.shareDir()
228 >        self.rootArgsFilename= 'arguments'
229 >        # modify Pset only the first time
230 >        if (isNew and self.pset != None): self.ModifyPset()
231 >
232 >        ## Prepare inputSandbox TarBall (only the first time)
233 >        self.tarNameWithPath = self.getTarBall(self.executable)
234 >
235 >
236 >    def ModifyPset(self):
237 >        import PsetManipulator as pp
238 >        PsetEdit = pp.PsetManipulator(self.pset)
239 >        try:
240 >            # Add FrameworkJobReport to parameter-set, set max events.
241 >            # Reset later for data jobs by writeCFG which does all modifications
242 >            PsetEdit.maxEvent(1)
243 >            PsetEdit.skipEvent(0)
244 >            PsetEdit.psetWriter(self.configFilename())
245 >            ## If present, add TFileService to output files
246 >            if not int(self.cfg_params.get('CMSSW.skip_TFileService_output',0)):
247 >                tfsOutput = PsetEdit.getTFileService()
248 >                if tfsOutput:
249 >                    if tfsOutput in self.output_file:
250 >                        common.logger.debug("Output from TFileService "+tfsOutput+" already in output files")
251 >                    else:
252 >                        outfileflag = True #output found
253 >                        self.output_file.append(tfsOutput)
254 >                        common.logger.info("Adding "+tfsOutput+" (from TFileService) to list of output files")
255 >                    pass
256 >                pass
257 >            ## If present and requested, add PoolOutputModule to output files
258 >            edmOutput = PsetEdit.getPoolOutputModule()
259 >            if int(self.cfg_params.get('CMSSW.get_edm_output',0)):
260 >                if edmOutput:
261 >                    if edmOutput in self.output_file:
262 >                        common.logger.debug("Output from PoolOutputModule "+edmOutput+" already in output files")
263 >                    else:
264 >                        self.output_file.append(edmOutput)
265 >                        common.logger.info("Adding "+edmOutput+" (from PoolOutputModule) to list of output files")
266 >                    pass
267 >                pass
268 >            # not required: check anyhow if present, to avoid accidental T2 overload
269 >            else:
270 >                if edmOutput and (edmOutput not in self.output_file):
271 >                    msg = "ERROR: a PoolOutputModule is present in your ParameterSet %s \n"%self.pset
272 >                    msg +="         but the file produced ( %s ) is not in the list of output files\n"%edmOutput
273 >                    msg += "WARNING: please remove it. If you want to keep it, add the file to output_files or use CMSSW.get_edm_output\n"
274 >                    if int(self.cfg_params.get('CMSSW.ignore_edm_output',0)):
275 >                        msg +=" CMSSW.ignore_edm_output==True : Hope you know what you are doing...\n"
276 >                        common.logger.info(msg)
277 >                    else:
278 >                        raise CrabException(msg)
279 >                pass
280 >            pass
281 >
282 >            if (PsetEdit.getBadFilesSetting()):
283 >                msg = "WARNING: You have set skipBadFiles to True. This will continue processing on some errors and you may not be notified."
284 >                common.logger.info(msg)
285 >
286 >        except CrabException, msg:
287 >            common.logger.info(str(msg))
288 >            msg='Error while manipulating ParameterSet (see previous message, if any): exiting...'
289 >            raise CrabException(msg)
290  
256        # modify Pset
257        if self.pset != None:
258            try:
259                # Add FrameworkJobReport to parameter-set, set max events.
260                # Reset later for data jobs by writeCFG which does all modifications
261                PsetEdit.addCrabFJR(self.fjrFileName) # FUTURE: Job report addition not needed by CMSSW>1.5
262                PsetEdit.maxEvent(self.eventsPerJob)
263                PsetEdit.psetWriter(self.configFilename())
264            except:
265                msg='Error while manipulating ParameterSet: exiting...'
266                raise CrabException(msg)
267        self.tgzNameWithPath = self.getTarBall(self.executable)
291  
292      def DataDiscoveryAndLocation(self, cfg_params):
293  
294          import DataDiscovery
295          import DataLocation
296 <        common.logger.debug(10,"CMSSW::DataDiscoveryAndLocation()")
296 >        common.logger.log(10-1,"CMSSW::DataDiscoveryAndLocation()")
297  
298          datasetPath=self.datasetPath
299  
300          ## Contact the DBS
301 <        common.logger.message("Contacting Data Discovery Services ...")
301 >        common.logger.info("Contacting Data Discovery Services ...")
302          try:
303 <            self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params)
303 >            self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params,self.skip_blocks)
304              self.pubdata.fetchDBSInfo()
305  
306          except DataDiscovery.NotExistingDatasetError, ex :
# Line 291 | Line 314 | class Cmssw(JobType):
314              raise CrabException(msg)
315  
316          self.filesbyblock=self.pubdata.getFiles()
317 <        self.eventsbyblock=self.pubdata.getEventsPerBlock()
318 <        self.eventsbyfile=self.pubdata.getEventsPerFile()
296 <        self.parentFiles=self.pubdata.getParent()
317 >        #print self.filesbyblock
318 >        self.conf['pubdata']=self.pubdata
319  
320          ## get max number of events
321          self.maxEvents=self.pubdata.getMaxEvents()
# Line 302 | Line 324 | class Cmssw(JobType):
324          try:
325              dataloc=DataLocation.DataLocation(self.filesbyblock.keys(),cfg_params)
326              dataloc.fetchDLSInfo()
327 +
328          except DataLocation.DataLocationError , ex:
329              msg = 'ERROR ***: failed Data Location in DLS \n %s '%ex.getErrorMessage()
330              raise CrabException(msg)
331  
332  
333 <        sites = dataloc.getSites()
333 >        unsorted_sites = dataloc.getSites()
334 >        sites = self.filesbyblock.fromkeys(self.filesbyblock,'')
335 >        for lfn in self.filesbyblock.keys():
336 >            if unsorted_sites.has_key(lfn):
337 >                sites[lfn]=unsorted_sites[lfn]
338 >            else:
339 >                sites[lfn]=[]
340 >
341 >        if len(sites)==0:
342 >            msg = 'ERROR ***: no location for any of the blocks of this dataset: \n\t %s \n'%datasetPath
343 >            msg += "\tMaybe the dataset is located only at T1's (or at T0), where analysis jobs are not allowed\n"
344 >            msg += "\tPlease check DataDiscovery page https://cmsweb.cern.ch/dbs_discovery/\n"
345 >            raise CrabException(msg)
346 >
347          allSites = []
348          listSites = sites.values()
349          for listSite in listSites:
350              for oneSite in listSite:
351                  allSites.append(oneSite)
352 <        allSites = self.uniquelist(allSites)
317 <
318 <        # screen output
319 <        common.logger.message("Requested dataset: " + datasetPath + " has " + str(self.maxEvents) + " events in " + str(len(self.filesbyblock.keys())) + " blocks.\n")
320 <
321 <        return sites
322 <
323 <    def jobSplittingByBlocks(self, blockSites):
324 <        """
325 <        Perform job splitting. Jobs run over an integer number of files
326 <        and no more than one block.
327 <        ARGUMENT: blockSites: dictionary with blocks as keys and list of host sites as values
328 <        REQUIRES: self.selectTotalNumberEvents, self.selectEventsPerJob, self.selectNumberofJobs,
329 <                  self.total_number_of_events, self.eventsPerJob, self.theNumberOfJobs,
330 <                  self.maxEvents, self.filesbyblock
331 <        SETS: self.jobDestination - Site destination(s) for each job (a list of lists)
332 <              self.total_number_of_jobs - Total # of jobs
333 <              self.list_of_args - File(s) job will run on (a list of lists)
334 <        """
335 <
336 <        # ---- Handle the possible job splitting configurations ---- #
337 <        if (self.selectTotalNumberEvents):
338 <            totalEventsRequested = self.total_number_of_events
339 <        if (self.selectEventsPerJob):
340 <            eventsPerJobRequested = self.eventsPerJob
341 <            if (self.selectNumberOfJobs):
342 <                totalEventsRequested = self.theNumberOfJobs * self.eventsPerJob
343 <
344 <        # If user requested all the events in the dataset
345 <        if (totalEventsRequested == -1):
346 <            eventsRemaining=self.maxEvents
347 <        # If user requested more events than are in the dataset
348 <        elif (totalEventsRequested > self.maxEvents):
349 <            eventsRemaining = self.maxEvents
350 <            common.logger.message("Requested "+str(self.total_number_of_events)+ " events, but only "+str(self.maxEvents)+" events are available.")
351 <        # If user requested less events than are in the dataset
352 <        else:
353 <            eventsRemaining = totalEventsRequested
354 <
355 <        # If user requested more events per job than are in the dataset
356 <        if (self.selectEventsPerJob and eventsPerJobRequested > self.maxEvents):
357 <            eventsPerJobRequested = self.maxEvents
358 <
359 <        # For user info at end
360 <        totalEventCount = 0
361 <
362 <        if (self.selectTotalNumberEvents and self.selectNumberOfJobs):
363 <            eventsPerJobRequested = int(eventsRemaining/self.theNumberOfJobs)
364 <
365 <        if (self.selectNumberOfJobs):
366 <            common.logger.message("May not create the exact number_of_jobs requested.")
367 <
368 <        if ( self.ncjobs == 'all' ) :
369 <            totalNumberOfJobs = 999999999
370 <        else :
371 <            totalNumberOfJobs = self.ncjobs
372 <
373 <        blocks = blockSites.keys()
374 <        blockCount = 0
375 <        # Backup variable in case self.maxEvents counted events in a non-included block
376 <        numBlocksInDataset = len(blocks)
377 <
378 <        jobCount = 0
379 <        list_of_lists = []
380 <
381 <        # list tracking which jobs are in which jobs belong to which block
382 <        jobsOfBlock = {}
383 <
384 <        # ---- Iterate over the blocks in the dataset until ---- #
385 <        # ---- we've met the requested total # of events    ---- #
386 <        while ( (eventsRemaining > 0) and (blockCount < numBlocksInDataset) and (jobCount < totalNumberOfJobs)):
387 <            block = blocks[blockCount]
388 <            blockCount += 1
389 <            if block not in jobsOfBlock.keys() :
390 <                jobsOfBlock[block] = []
391 <
392 <            if self.eventsbyblock.has_key(block) :
393 <                numEventsInBlock = self.eventsbyblock[block]
394 <                common.logger.debug(5,'Events in Block File '+str(numEventsInBlock))
395 <
396 <                files = self.filesbyblock[block]
397 <                numFilesInBlock = len(files)
398 <                if (numFilesInBlock <= 0):
399 <                    continue
400 <                fileCount = 0
401 <
402 <                # ---- New block => New job ---- #
403 <                parString = ""
404 <                # counter for number of events in files currently worked on
405 <                filesEventCount = 0
406 <                # flag if next while loop should touch new file
407 <                newFile = 1
408 <                # job event counter
409 <                jobSkipEventCount = 0
410 <
411 <                # ---- Iterate over the files in the block until we've met the requested ---- #
412 <                # ---- total # of events or we've gone over all the files in this block  ---- #
413 <                pString=''
414 <                while ( (eventsRemaining > 0) and (fileCount < numFilesInBlock) and (jobCount < totalNumberOfJobs) ):
415 <                    file = files[fileCount]
416 <                    if self.useParent:
417 <                        parent = self.parentFiles[file]
418 <                        for f in parent :
419 <                            pString += '\\\"' + f + '\\\"\,'
420 <                        common.logger.debug(6, "File "+str(file)+" has the following parents: "+str(parent))
421 <                        common.logger.write("File "+str(file)+" has the following parents: "+str(parent))
422 <                    if newFile :
423 <                        try:
424 <                            numEventsInFile = self.eventsbyfile[file]
425 <                            common.logger.debug(6, "File "+str(file)+" has "+str(numEventsInFile)+" events")
426 <                            # increase filesEventCount
427 <                            filesEventCount += numEventsInFile
428 <                            # Add file to current job
429 <                            parString += '\\\"' + file + '\\\"\,'
430 <                            newFile = 0
431 <                        except KeyError:
432 <                            common.logger.message("File "+str(file)+" has unknown number of events: skipping")
433 <
434 <                    eventsPerJobRequested = min(eventsPerJobRequested, eventsRemaining)
435 <                    # if less events in file remain than eventsPerJobRequested
436 <                    if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested):
437 <                        # if last file in block
438 <                        if ( fileCount == numFilesInBlock-1 ) :
439 <                            # end job using last file, use remaining events in block
440 <                            # close job and touch new file
441 <                            fullString = parString[:-2]
442 <                            if self.useParent:
443 <                                fullParentString = pString[:-2]
444 <                                list_of_lists.append([fullString,fullParentString,str(-1),str(jobSkipEventCount)])
445 <                            else:
446 <                                list_of_lists.append([fullString,str(-1),str(jobSkipEventCount)])
447 <                            common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(filesEventCount - jobSkipEventCount)+" events (last file in block).")
448 <                            self.jobDestination.append(blockSites[block])
449 <                            common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
450 <                            # fill jobs of block dictionary
451 <                            jobsOfBlock[block].append(jobCount+1)
452 <                            # reset counter
453 <                            jobCount = jobCount + 1
454 <                            totalEventCount = totalEventCount + filesEventCount - jobSkipEventCount
455 <                            eventsRemaining = eventsRemaining - filesEventCount + jobSkipEventCount
456 <                            jobSkipEventCount = 0
457 <                            # reset file
458 <                            pString = ""
459 <                            parString = ""
460 <                            filesEventCount = 0
461 <                            newFile = 1
462 <                            fileCount += 1
463 <                        else :
464 <                            # go to next file
465 <                            newFile = 1
466 <                            fileCount += 1
467 <                    # if events in file equal to eventsPerJobRequested
468 <                    elif ( filesEventCount - jobSkipEventCount == eventsPerJobRequested ) :
469 <                        # close job and touch new file
470 <                        fullString = parString[:-2]
471 <                        if self.useParent:
472 <                            fullParentString = pString[:-2]
473 <                            list_of_lists.append([fullString,fullParentString,str(eventsPerJobRequested),str(jobSkipEventCount)])
474 <                        else:
475 <                            list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
476 <                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
477 <                        self.jobDestination.append(blockSites[block])
478 <                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
479 <                        jobsOfBlock[block].append(jobCount+1)
480 <                        # reset counter
481 <                        jobCount = jobCount + 1
482 <                        totalEventCount = totalEventCount + eventsPerJobRequested
483 <                        eventsRemaining = eventsRemaining - eventsPerJobRequested
484 <                        jobSkipEventCount = 0
485 <                        # reset file
486 <                        pString = ""
487 <                        parString = ""
488 <                        filesEventCount = 0
489 <                        newFile = 1
490 <                        fileCount += 1
352 >        [allSites.append(it) for it in allSites if not allSites.count(it)]
353  
492                    # if more events in file remain than eventsPerJobRequested
493                    else :
494                        # close job but don't touch new file
495                        fullString = parString[:-2]
496                        if self.useParent:
497                            fullParentString = pString[:-2]
498                            list_of_lists.append([fullString,fullParentString,str(eventsPerJobRequested),str(jobSkipEventCount)])
499                        else:
500                            list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
501                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
502                        self.jobDestination.append(blockSites[block])
503                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
504                        jobsOfBlock[block].append(jobCount+1)
505                        # increase counter
506                        jobCount = jobCount + 1
507                        totalEventCount = totalEventCount + eventsPerJobRequested
508                        eventsRemaining = eventsRemaining - eventsPerJobRequested
509                        # calculate skip events for last file
510                        # use filesEventCount (contains several files), jobSkipEventCount and eventsPerJobRequest
511                        jobSkipEventCount = eventsPerJobRequested - (filesEventCount - jobSkipEventCount - self.eventsbyfile[file])
512                        # remove all but the last file
513                        filesEventCount = self.eventsbyfile[file]
514                        if self.useParent:
515                            for f in parent : pString += '\\\"' + f + '\\\"\,'
516                        parString = '\\\"' + file + '\\\"\,'
517                    pass # END if
518                pass # END while (iterate over files in the block)
519        pass # END while (iterate over blocks in the dataset)
520        self.ncjobs = self.total_number_of_jobs = jobCount
521        if (eventsRemaining > 0 and jobCount < totalNumberOfJobs ):
522            common.logger.message("Could not run on all requested events because some blocks not hosted at allowed sites.")
523        common.logger.message(str(jobCount)+" job(s) can run on "+str(totalEventCount)+" events.\n")
354  
355          # screen output
356 <        screenOutput = "List of jobs and available destination sites:\n\n"
527 <
528 <        # keep trace of block with no sites to print a warning at the end
529 <        noSiteBlock = []
530 <        bloskNoSite = []
531 <
532 <        blockCounter = 0
533 <        for block in blocks:
534 <            if block in jobsOfBlock.keys() :
535 <                blockCounter += 1
536 <                screenOutput += "Block %5i: jobs %20s: sites: %s\n" % (blockCounter,spanRanges(jobsOfBlock[block]),
537 <                    ','.join(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)))
538 <                if len(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)) == 0:
539 <                    noSiteBlock.append( spanRanges(jobsOfBlock[block]) )
540 <                    bloskNoSite.append( blockCounter )
541 <
542 <        common.logger.message(screenOutput)
543 <        if len(noSiteBlock) > 0 and len(bloskNoSite) > 0:
544 <            msg = 'WARNING: No sites are hosting any part of data for block:\n                '
545 <            virgola = ""
546 <            if len(bloskNoSite) > 1:
547 <                virgola = ","
548 <            for block in bloskNoSite:
549 <                msg += ' ' + str(block) + virgola
550 <            msg += '\n               Related jobs:\n                 '
551 <            virgola = ""
552 <            if len(noSiteBlock) > 1:
553 <                virgola = ","
554 <            for range_jobs in noSiteBlock:
555 <                msg += str(range_jobs) + virgola
556 <            msg += '\n               will not be submitted and this block of data can not be analyzed!\n'
557 <            if self.cfg_params.has_key('EDG.se_white_list'):
558 <                msg += 'WARNING: SE White List: '+self.cfg_params['EDG.se_white_list']+'\n'
559 <                msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
560 <                msg += 'Please check if the dataset is available at this site!)\n'
561 <            if self.cfg_params.has_key('EDG.ce_white_list'):
562 <                msg += 'WARNING: CE White List: '+self.cfg_params['EDG.ce_white_list']+'\n'
563 <                msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
564 <                msg += 'Please check if the dataset is available at this site!)\n'
565 <
566 <            common.logger.message(msg)
567 <
568 <        self.list_of_args = list_of_lists
569 <        return
570 <
571 <    def jobSplittingNoInput(self):
572 <        """
573 <        Perform job splitting based on number of event per job
574 <        """
575 <        common.logger.debug(5,'Splitting per events')
576 <
577 <        if (self.selectEventsPerJob):
578 <            common.logger.message('Required '+str(self.eventsPerJob)+' events per job ')
579 <        if (self.selectNumberOfJobs):
580 <            common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
581 <        if (self.selectTotalNumberEvents):
582 <            common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
583 <
584 <        if (self.total_number_of_events < 0):
585 <            msg='Cannot split jobs per Events with "-1" as total number of events'
586 <            raise CrabException(msg)
356 >        common.logger.info("Requested dataset: " + datasetPath + " has " + str(self.maxEvents) + " events in " + str(len(self.filesbyblock.keys())) + " blocks.\n")
357  
358 <        if (self.selectEventsPerJob):
589 <            if (self.selectTotalNumberEvents):
590 <                self.total_number_of_jobs = int(self.total_number_of_events/self.eventsPerJob)
591 <            elif(self.selectNumberOfJobs) :
592 <                self.total_number_of_jobs =self.theNumberOfJobs
593 <                self.total_number_of_events =int(self.theNumberOfJobs*self.eventsPerJob)
594 <
595 <        elif (self.selectNumberOfJobs) :
596 <            self.total_number_of_jobs = self.theNumberOfJobs
597 <            self.eventsPerJob = int(self.total_number_of_events/self.total_number_of_jobs)
598 <
599 <        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
600 <
601 <        # is there any remainder?
602 <        check = int(self.total_number_of_events) - (int(self.total_number_of_jobs)*self.eventsPerJob)
603 <
604 <        common.logger.debug(5,'Check  '+str(check))
605 <
606 <        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created, each for '+str(self.eventsPerJob)+' for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
607 <        if check > 0:
608 <            common.logger.message('Warning: asked '+str(self.total_number_of_events)+' but can do only '+str(int(self.total_number_of_jobs)*self.eventsPerJob))
609 <
610 <        # argument is seed number.$i
611 <        self.list_of_args = []
612 <        for i in range(self.total_number_of_jobs):
613 <            ## Since there is no input, any site is good
614 <            self.jobDestination.append([""]) #must be empty to write correctly the xml
615 <            args=[]
616 <            if (self.firstRun):
617 <                ## pythia first run
618 <                args.append(str(self.firstRun)+str(i))
619 <            self.list_of_args.append(args)
620 <
621 <        return
622 <
623 <
624 <    def jobSplittingForScript(self):
625 <        """
626 <        Perform job splitting based on number of job
627 <        """
628 <        common.logger.debug(5,'Splitting per job')
629 <        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
358 >        return sites
359  
631        self.total_number_of_jobs = self.theNumberOfJobs
360  
361 <        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
361 >    def split(self, jobParams,firstJobID):
362  
363 <        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created')
363 >        jobParams = self.dict['args']
364 >        njobs = self.dict['njobs']
365 >        self.jobDestination = self.dict['jobDestination']
366  
367 <        # argument is seed number.$i
368 <        self.list_of_args = []
639 <        for i in range(self.total_number_of_jobs):
640 <            self.jobDestination.append([""])
641 <            self.list_of_args.append([str(i)])
642 <        return
367 >        if njobs==0:
368 >            raise CrabException("Ask to split "+str(njobs)+" jobs: aborting")
369  
644    def split(self, jobParams):
645
646        njobs = self.total_number_of_jobs
647        arglist = self.list_of_args
370          # create the empty structure
371          for i in range(njobs):
372              jobParams.append("")
373  
374          listID=[]
375          listField=[]
376 <        for job in range(njobs):
377 <            jobParams[job] = arglist[job]
376 >        listDictions=[]
377 >        exist= os.path.exists(self.argsFile)
378 >        for id in range(njobs):
379 >            job = id + int(firstJobID)
380              listID.append(job+1)
381              job_ToSave ={}
382              concString = ' '
383              argu=''
384 <            if len(jobParams[job]):
385 <                argu +=   concString.join(jobParams[job] )
386 <            job_ToSave['arguments']= str(job+1)+' '+argu
387 <            job_ToSave['dlsDestination']= self.jobDestination[job]
384 >            str_argu = str(job+1)
385 >            if len(jobParams[id]):
386 >                argu = {'JobID': job+1}
387 >                for i in range(len(jobParams[id])):
388 >                    argu[self.dict['params'][i]]=jobParams[id][i]
389 >                # just for debug
390 >                str_argu += concString.join(jobParams[id])
391 >            if argu != '': listDictions.append(argu)
392 >            job_ToSave['arguments']= str(job+1)
393 >            job_ToSave['dlsDestination']= self.jobDestination[id]
394              listField.append(job_ToSave)
395 <            msg="Job "+str(job)+" Arguments:   "+str(job+1)+" "+argu+"\n"  \
396 <            +"                     Destination: "+str(self.jobDestination[job])
397 <            common.logger.debug(5,msg)
395 >            from ProdCommon.SiteDB.CmsSiteMapper import CmsSEMap
396 >            cms_se = CmsSEMap()
397 >            msg="Job  %s  Arguments:  %s\n"%(str(job+1),str_argu)
398 >            msg+="\t  Destination: %s "%(str(self.jobDestination[id]))
399 >            SEDestination = [cms_se[dest] for dest in self.jobDestination[id]]
400 >            msg+="\t  CMSDestination: %s "%(str(SEDestination))
401 >            common.logger.log(10-1,msg)
402 >        # write xml
403 >        if len(listDictions):
404 >            if exist==False: self.CreateXML()
405 >            self.addEntry(listDictions)
406 >            self.addXMLfile()
407          common._db.updateJob_(listID,listField)
408 <        self.argsList = (len(jobParams[0])+1)
408 >        self.zipTarFile()
409 >        return
410  
411 +    def addXMLfile(self):
412 +
413 +        import tarfile
414 +        try:
415 +            tar = tarfile.open(self.tarNameWithPath, "a")
416 +            tar.add(self.argsFile, os.path.basename(self.argsFile))
417 +            tar.close()
418 +        except IOError, exc:
419 +            msg = 'Could not add %s to %s \n'%(self.argsFile,self.tarNameWithPath)
420 +            msg += str(exc)
421 +            raise CrabException(msg)
422 +        except tarfile.TarError, exc:
423 +            msg = 'Could not add %s to %s \n'%(self.argsFile,self.tarNameWithPath)
424 +            msg += str(exc)
425 +            raise CrabException(msg)
426 +
427 +    def CreateXML(self):
428 +        """
429 +        """
430 +        result = IMProvNode( self.rootArgsFilename )
431 +        outfile = file( self.argsFile, 'w').write(str(result))
432 +        return
433 +
434 +    def addEntry(self, listDictions):
435 +        """
436 +        _addEntry_
437 +
438 +        add an entry to the xml file
439 +        """
440 +        from IMProv.IMProvLoader import loadIMProvFile
441 +        ## load xml
442 +        improvDoc = loadIMProvFile(self.argsFile)
443 +        entrname= 'Job'
444 +        for dictions in listDictions:
445 +           report = IMProvNode(entrname , None, **dictions)
446 +           improvDoc.addNode(report)
447 +        outfile = file( self.argsFile, 'w').write(str(improvDoc))
448          return
449  
450      def numberOfJobs(self):
451 <        return self.total_number_of_jobs
451 >        return self.dict['njobs']
452  
453      def getTarBall(self, exe):
454          """
455          Return the TarBall with lib and exe
456          """
457 <        self.tgzNameWithPath = common.work_space.pathForTgz()+'share/'+self.tgz_name
458 <        if os.path.exists(self.tgzNameWithPath):
459 <            return self.tgzNameWithPath
457 >        self.tarNameWithPath = common.work_space.pathForTgz()+self.tar_name
458 >        if os.path.exists(self.tarNameWithPath):
459 >            return self.tarNameWithPath
460  
461          # Prepare a tar gzipped file with user binaries.
462          self.buildTar_(exe)
463  
464 <        return string.strip(self.tgzNameWithPath)
464 >        return string.strip(self.tarNameWithPath)
465  
466      def buildTar_(self, executable):
467  
# Line 694 | Line 471 | class Cmssw(JobType):
471  
472          ## check if working area is release top
473          if swReleaseTop == '' or swArea == swReleaseTop:
474 <            common.logger.debug(3,"swArea = "+swArea+" swReleaseTop ="+swReleaseTop)
474 >            common.logger.debug("swArea = "+swArea+" swReleaseTop ="+swReleaseTop)
475              return
476  
477          import tarfile
478          try: # create tar ball
479 <            tar = tarfile.open(self.tgzNameWithPath, "w:gz")
479 >            #tar = tarfile.open(self.tgzNameWithPath, "w:gz")
480 >            tar = tarfile.open(self.tarNameWithPath, "w")
481              ## First find the executable
482              if (self.executable != ''):
483                  exeWithPath = self.scram.findFile_(executable)
# Line 709 | Line 487 | class Cmssw(JobType):
487                  ## then check if it's private or not
488                  if exeWithPath.find(swReleaseTop) == -1:
489                      # the exe is private, so we must ship
490 <                    common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
490 >                    common.logger.debug("Exe "+exeWithPath+" to be tarred")
491                      path = swArea+'/'
492                      # distinguish case when script is in user project area or given by full path somewhere else
493                      if exeWithPath.find(path) >= 0 :
# Line 723 | Line 501 | class Cmssw(JobType):
501                      pass
502  
503              ## Now get the libraries: only those in local working area
504 +            tar.dereference=True
505              libDir = 'lib'
506              lib = swArea+'/' +libDir
507 <            common.logger.debug(5,"lib "+lib+" to be tarred")
507 >            common.logger.debug("lib "+lib+" to be tarred")
508              if os.path.exists(lib):
509                  tar.add(lib,libDir)
510  
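
Toggling tar.dereference around the lib and module additions makes tarfile follow symbolic links, so the sandbox ships the real files rather than links that would dangle on the worker node. A minimal sketch (paths illustrative):

    import tarfile

    tar = tarfile.open("default.tar", "w")
    tar.dereference = True                    # archive link targets, not the links themselves
    tar.add("/path/to/projectArea/lib", "lib")
    tar.add("/path/to/projectArea/module", "module")
    tar.dereference = False
    tar.close()
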
# Line 734 | Line 513 | class Cmssw(JobType):
513              module = swArea + '/' + moduleDir
514              if os.path.isdir(module):
515                  tar.add(module,moduleDir)
516 +            tar.dereference=False
517  
518              ## Now check if any data dir(s) is present
739            swAreaLen=len(swArea)
519              self.dataExist = False
520 <            for root, dirs, files in os.walk(swArea):
521 <                if "data" in dirs:
522 <                    self.dataExist=True
523 <                    common.logger.debug(5,"data "+root+"/data"+" to be tarred")
524 <                    tar.add(root+"/data",root[swAreaLen:]+"/data")
520 >            todo_list = [(i, i) for i in  os.listdir(swArea+"/src")]
521 >            while len(todo_list):
522 >                entry, name = todo_list.pop()
523 >                if name.startswith('crab_0_') or  name.startswith('.') or name == 'CVS':
524 >                    continue
525 >                if os.path.isdir(swArea+"/src/"+entry):
526 >                    entryPath = entry + '/'
527 >                    todo_list += [(entryPath + i, i) for i in  os.listdir(swArea+"/src/"+entry)]
528 >                    if name == 'data':
529 >                        self.dataExist=True
530 >                        common.logger.debug("data "+entry+" to be tarred")
531 >                        tar.add(swArea+"/src/"+entry,"src/"+entry)
532 >                    pass
533 >                pass
534  
535              ### CMSSW ParameterSet
536              if not self.pset is None:
537                  cfg_file = common.work_space.jobDir()+self.configFilename()
538                  tar.add(cfg_file,self.configFilename())
751                common.logger.debug(5,"File added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
539  
540 +            try:
541 +                crab_cfg_file = common.work_space.shareDir()+'/crab.cfg'
542 +                tar.add(crab_cfg_file,'crab.cfg')
543 +            except:
544 +                pass
545  
546              ## Add ProdCommon dir to tar
547 <            prodcommonDir = 'ProdCommon'
547 >            prodcommonDir = './'
548              prodcommonPath = os.environ['CRABDIR'] + '/' + 'external/'
549 <            if os.path.isdir(prodcommonPath):
550 <                tar.add(prodcommonPath,prodcommonDir)
551 <            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
549 >            neededStuff = ['ProdCommon/__init__.py','ProdCommon/FwkJobRep', 'ProdCommon/CMSConfigTools', \
550 >                           'ProdCommon/Core', 'ProdCommon/MCPayloads', 'IMProv', 'ProdCommon/Storage', \
551 >                           'WMCore/__init__.py','WMCore/Algorithms']
552 >            for file in neededStuff:
553 >                tar.add(prodcommonPath+file,prodcommonDir+file)
554  
555              ##### ML stuff
556              ML_file_list=['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py']
557              path=os.environ['CRABDIR'] + '/python/'
558              for file in ML_file_list:
559                  tar.add(path+file,file)
766            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
560  
561              ##### Utils
562 <            Utils_file_list=['parseCrabFjr.py','writeCfg.py', 'fillCrabFjr.py']
562 >            Utils_file_list=['parseCrabFjr.py','writeCfg.py', 'fillCrabFjr.py','cmscp.py']
563              for file in Utils_file_list:
564                  tar.add(path+file,file)
772            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
565  
566              ##### AdditionalFiles
567 +            tar.dereference=True
568              for file in self.additional_inbox_files:
569                  tar.add(file,string.split(file,'/')[-1])
570 <            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
570 >            tar.dereference=False
571 >            common.logger.log(10-1,"Files in "+self.tarNameWithPath+" : "+str(tar.getnames()))
572  
573              tar.close()
574 <        except :
575 <            raise CrabException('Could not create tar-ball')
574 >        except IOError, exc:
575 >            msg = 'Could not create tar-ball %s \n'%self.tarNameWithPath
576 >            msg += str(exc)
577 >            raise CrabException(msg)
578 >        except tarfile.TarError, exc:
579 >            msg = 'Could not create tar-ball %s \n'%self.tarNameWithPath
580 >            msg += str(exc)
581 >            raise CrabException(msg)
582 >
583 >    def zipTarFile(self):
584 >
585 >        cmd = "gzip -c %s > %s "%(self.tarNameWithPath,self.tgzNameWithPath)
586 >        res=runCommand(cmd)
587  
783        ## check for tarball size
588          tarballinfo = os.stat(self.tgzNameWithPath)
589          if ( tarballinfo.st_size > self.MaxTarBallSize*1024*1024 ) :
590 <            raise CrabException('Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) + ' MB input sandbox limit and not supported by the used GRID submission system. Please make sure that no unnecessary files are in all data directories in your local CMSSW project area as they are automatically packed into the input sandbox.')
590 >            msg  = 'Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) \
591 >               +'MB input sandbox limit \n'
592 >            msg += '      and not supported by the direct GRID submission system.\n'
593 >            msg += '      Please use the CRAB server mode by setting server_name=<NAME> in section [CRAB] of your crab.cfg.\n'
594 >            msg += '      For further infos please see https://twiki.cern.ch/twiki/bin/view/CMS/CrabServer#CRABSERVER_for_Users'
595 >            raise CrabException(msg)
596  
597          ## create tar-ball with ML stuff
598  
# Line 792 | Line 601 | class Cmssw(JobType):
601          Returns part of a job script which prepares
602          the execution environment for the job 'nj'.
603          """
604 +        # FUTURE: Drop support for .cfg when possible
605          if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
606              psetName = 'pset.py'
607          else:
# Line 799 | Line 609 | class Cmssw(JobType):
609          # Prepare JobType-independent part
610          txt = '\n#Written by cms_cmssw::wsSetupEnvironment\n'
611          txt += 'echo ">>> setup environment"\n'
612 <        txt += 'if [ $middleware == LCG ]; then \n'
612 >        txt += 'if [ $middleware == LCG ] || [ $middleware == CAF ] || [ $middleware == LSF ]; then \n'
613          txt += self.wsSetupCMSLCGEnvironment_()
614          txt += 'elif [ $middleware == OSG ]; then\n'
615          txt += '    WORKING_DIR=`/bin/mktemp  -d $OSG_WN_TMP/cms_XXXXXXXXXXXX`\n'
# Line 814 | Line 624 | class Cmssw(JobType):
624          txt += '    cd $WORKING_DIR\n'
625          txt += '    echo ">>> current directory (WORKING_DIR): $WORKING_DIR"\n'
626          txt += self.wsSetupCMSOSGEnvironment_()
627 +        #Setup SGE Environment
628 +        txt += 'elif [ $middleware == SGE ]; then\n'
629 +        txt += self.wsSetupCMSLCGEnvironment_()
630 +
631 +        txt += 'elif [ $middleware == ARC ]; then\n'
632 +        txt += self.wsSetupCMSLCGEnvironment_()
633 +
634          txt += 'fi\n'
635  
636          # Prepare JobType-specific part
# Line 829 | Line 646 | class Cmssw(JobType):
646          txt += '    func_exit\n'
647          txt += 'fi \n'
648          txt += 'cd '+self.version+'\n'
649 <        txt += 'SOFTWARE_DIR=`pwd`\n'
649 >        txt += 'SOFTWARE_DIR=`pwd`; export SOFTWARE_DIR\n'
650          txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
651          txt += 'eval `'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME`\n'
652          txt += 'if [ $? != 0 ] ; then\n'
# Line 853 | Line 670 | class Cmssw(JobType):
670          # Prepare job-specific part
671          job = common.job_list[nj]
672          if (self.datasetPath):
673 +            self.primaryDataset = self.datasetPath.split("/")[1]
674 +            DataTier = self.datasetPath.split("/")[2]
675              txt += '\n'
676              txt += 'DatasetPath='+self.datasetPath+'\n'
677  
678 <            datasetpath_split = self.datasetPath.split("/")
679 <
861 <            txt += 'PrimaryDataset='+datasetpath_split[1]+'\n'
862 <            txt += 'DataTier='+datasetpath_split[2]+'\n'
678 >            txt += 'PrimaryDataset='+self.primaryDataset +'\n'
679 >            txt += 'DataTier='+DataTier+'\n'
680              txt += 'ApplicationFamily=cmsRun\n'
681  
682          else:
683 +            self.primaryDataset = 'null'
684              txt += 'DatasetPath=MCDataTier\n'
685              txt += 'PrimaryDataset=null\n'
686              txt += 'DataTier=null\n'
# Line 871 | Line 689 | class Cmssw(JobType):
689              pset = os.path.basename(job.configFilename())
690              txt += '\n'
691              txt += 'cp  $RUNTIME_AREA/'+pset+' .\n'
874            if (self.datasetPath): # standard job
875                txt += 'InputFiles=${args[1]}; export InputFiles\n'
876                if (self.useParent):  
877                    txt += 'ParentFiles=${args[2]}; export ParentFiles\n'
878                    txt += 'MaxEvents=${args[3]}; export MaxEvents\n'
879                    txt += 'SkipEvents=${args[4]}; export SkipEvents\n'
880                else:
881                    txt += 'MaxEvents=${args[2]}; export MaxEvents\n'
882                    txt += 'SkipEvents=${args[3]}; export SkipEvents\n'
883                txt += 'echo "Inputfiles:<$InputFiles>"\n'
884                if (self.useParent): txt += 'echo "ParentFiles:<$ParentFiles>"\n'
885                txt += 'echo "MaxEvents:<$MaxEvents>"\n'
886                txt += 'echo "SkipEvents:<$SkipEvents>"\n'
887            else:  # pythia like job
888                txt += 'PreserveSeeds='  + ','.join(self.preserveSeeds)  + '; export PreserveSeeds\n'
889                txt += 'IncrementSeeds=' + ','.join(self.incrementSeeds) + '; export IncrementSeeds\n'
890                txt += 'echo "PreserveSeeds: <$PreserveSeeds>"\n'
891                txt += 'echo "IncrementSeeds:<$IncrementSeeds>"\n'
892                if (self.firstRun):
893                    txt += 'FirstRun=${args[1]}; export FirstRun\n'
894                    txt += 'echo "FirstRun: <$FirstRun>"\n'
692  
693 <            txt += 'mv -f ' + pset + ' ' + psetName + '\n'
693 >            txt += 'PreserveSeeds='  + ','.join(self.preserveSeeds)  + '; export PreserveSeeds\n'
694 >            txt += 'IncrementSeeds=' + ','.join(self.incrementSeeds) + '; export IncrementSeeds\n'
695 >            txt += 'echo "PreserveSeeds: <$PreserveSeeds>"\n'
696 >            txt += 'echo "IncrementSeeds:<$IncrementSeeds>"\n'
697  
698 <
699 <        if self.pset != None:
900 <            # FUTURE: Can simply for 2_1_x and higher
901 <            txt += '\n'
902 <            if self.debug_wrapper==True:
903 <                txt += 'echo "***** cat ' + psetName + ' *********"\n'
904 <                txt += 'cat ' + psetName + '\n'
905 <                txt += 'echo "****** end ' + psetName + ' ********"\n'
906 <                txt += '\n'
907 <            txt += 'PSETHASH=`edmConfigHash < ' + psetName + '` \n'
908 <            txt += 'echo "PSETHASH = $PSETHASH" \n'
698 >            txt += 'mv -f ' + pset + ' ' + psetName + '\n'
699 >        else:
700              txt += '\n'
701 +            txt += 'export AdditionalArgs=%s\n'%(self.AdditionalArgs)
702 +
703          return txt
704  
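Illustrative sketch (the seed names are assumptions, not taken from the file): in the new revision the seed lists are exported for every cmsRun job, while script jobs get AdditionalArgs exported instead. The rendered shell lines look like this:

preserveSeeds  = ['sourceSeed']    # hypothetical crab.cfg setting
incrementSeeds = ['generator']     # hypothetical crab.cfg setting
frag  = 'PreserveSeeds='  + ','.join(preserveSeeds)  + '; export PreserveSeeds\n'
frag += 'IncrementSeeds=' + ','.join(incrementSeeds) + '; export IncrementSeeds\n'
print frag,
# PreserveSeeds=sourceSeed; export PreserveSeeds
# IncrementSeeds=generator; export IncrementSeeds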
705      def wsUntarSoftware(self, nj=0):
# Line 919 | Line 712 | class Cmssw(JobType):
712  
713          if os.path.isfile(self.tgzNameWithPath):
714              txt += 'echo ">>> tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+' :" \n'
715 <            txt += 'tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
716 <            if  self.debug_wrapper:
715 >            txt += 'tar zxvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
716 >            if  self.debug_wrapper==1 :
717                  txt += 'ls -Al \n'
718              txt += 'untar_status=$? \n'
719              txt += 'if [ $untar_status -ne 0 ]; then \n'
# Line 931 | Line 724 | class Cmssw(JobType):
724              txt += '   echo "Successful untar" \n'
725              txt += 'fi \n'
726              txt += '\n'
727 <            txt += 'echo ">>> Include ProdCommon in PYTHONPATH:"\n'
727 >            txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
728              txt += 'if [ -z "$PYTHONPATH" ]; then\n'
729 <            txt += '   export PYTHONPATH=$RUNTIME_AREA/ProdCommon\n'
729 >            txt += '   export PYTHONPATH=$RUNTIME_AREA/\n'
730              txt += 'else\n'
731 <            txt += '   export PYTHONPATH=$RUNTIME_AREA/ProdCommon:${PYTHONPATH}\n'
731 >            txt += '   export PYTHONPATH=$RUNTIME_AREA/:${PYTHONPATH}\n'
732              txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
733              txt += 'fi\n'
734              txt += '\n'
# Line 962 | Line 755 | class Cmssw(JobType):
755          if len(self.additional_inbox_files)>0:
756              for file in self.additional_inbox_files:
757                  txt += 'mv $RUNTIME_AREA/'+os.path.basename(file)+' . \n'
758 <        txt += 'mv $RUNTIME_AREA/ProdCommon/ . \n'
758 >        # txt += 'mv $RUNTIME_AREA/ProdCommon/ . \n'
759 >        # txt += 'mv $RUNTIME_AREA/IMProv/ . \n'
760  
761 +        txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
762          txt += 'if [ -z "$PYTHONPATH" ]; then\n'
763 <        txt += '   export PYTHONPATH=$SOFTWARE_DIR/ProdCommon\n'
763 >        txt += '   export PYTHONPATH=$RUNTIME_AREA/\n'
764          txt += 'else\n'
765 <        txt += '   export PYTHONPATH=$SOFTWARE_DIR/ProdCommon:${PYTHONPATH}\n'
765 >        txt += '   export PYTHONPATH=$RUNTIME_AREA/:${PYTHONPATH}\n'
766          txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
767          txt += 'fi\n'
768          txt += '\n'
769  
770 +        if self.pset != None:
771 +            # FUTURE: Drop support for .cfg when possible
772 +            if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
773 +                psetName = 'pset.py'
774 +            else:
775 +                psetName = 'pset.cfg'
 776 +            # FUTURE: Can simplify for 2_1_x and higher
777 +            txt += '\n'
778 +            if self.debug_wrapper == 1:
779 +                txt += 'echo "***** cat ' + psetName + ' *********"\n'
780 +                txt += 'cat ' + psetName + '\n'
781 +                txt += 'echo "****** end ' + psetName + ' ********"\n'
782 +                txt += '\n'
783 +                txt += 'echo "***********************" \n'
784 +                txt += 'which edmConfigHash \n'
785 +                txt += 'echo "***********************" \n'
786 +            if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
787 +                txt += 'edmConfigHash ' + psetName + ' \n'
788 +                txt += 'PSETHASH=`edmConfigHash ' + psetName + '` \n'
789 +            else:
790 +                txt += 'PSETHASH=`edmConfigHash < ' + psetName + '` \n'
791 +            txt += 'echo "PSETHASH = $PSETHASH" \n'
792 +            #### FEDE temporary fix for noEdm files #####
793 +            txt += 'if [ -z "$PSETHASH" ]; then \n'
794 +            txt += '   export PSETHASH=null\n'
795 +            txt += 'fi \n'
796 +            #############################################
797 +            txt += '\n'
798          return txt
799  
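Illustrative sketch of the release test used above to choose the configuration file name (the same test also decides whether edmConfigHash takes the file as an argument or on stdin); the helper below is invented for illustration and does not exist in the file:

def pick_pset_name(cmssw_major, cmssw_minor):
    # CMSSW >= 2_1_x (and any 3_x) uses python configuration files, older releases .cfg
    if (cmssw_major >= 2 and cmssw_minor >= 1) or (cmssw_major >= 3):
        return 'pset.py'
    return 'pset.cfg'

# pick_pset_name(2, 2) -> 'pset.py'      pick_pset_name(2, 0) -> 'pset.cfg'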
977    def modifySteeringCards(self, nj):
978        """
979        modify the card provided by the user,
980        writing a new card into share dir
981        """
800  
801      def executableName(self):
802          if self.scriptExe:
# Line 988 | Line 806 | class Cmssw(JobType):
806  
807      def executableArgs(self):
808          # FUTURE: This function tests the CMSSW version. Can be simplified as we drop support for old versions
809 <        if self.scriptExe:#CarlosDaniele
810 <            return   self.scriptExe + " $NJob"
809 >        if self.scriptExe:
810 >            return self.scriptExe + " $NJob $AdditionalArgs"
811          else:
812              ex_args = ""
813 <            # FUTURE: This tests the CMSSW version. Can remove code as versions deprecated
814 <            # Framework job report
997 <            if (self.CMSSW_major >= 1 and self.CMSSW_minor >= 5) or (self.CMSSW_major >= 2):
998 <                ex_args += " -j $RUNTIME_AREA/crab_fjr_$NJob.xml"
999 <            # Type of config file
813 >            ex_args += " -j $RUNTIME_AREA/crab_fjr_$NJob.xml"
814 >            # Type of config file depends on CMSSW version
815              if self.CMSSW_major >= 2 :
816                  ex_args += " -p pset.py"
817              else:
# Line 1010 | Line 825 | class Cmssw(JobType):
825          inp_box = []
826          if os.path.isfile(self.tgzNameWithPath):
827              inp_box.append(self.tgzNameWithPath)
828 <        wrapper = os.path.basename(str(common._db.queryTask('scriptName')))
1014 <        inp_box.append(common.work_space.pathForTgz() +'job/'+ wrapper)
828 >        inp_box.append(common.work_space.jobDir() + self.scriptName)
829          return inp_box
830  
831      def outputSandbox(self, nj):
# Line 1023 | Line 837 | class Cmssw(JobType):
837          ## User Declared output files
838          for out in (self.output_file+self.output_file_sandbox):
839              n_out = nj + 1
840 <            out_box.append(self.numberFile_(out,str(n_out)))
840 >            out_box.append(numberFile(out,str(n_out)))
841          return out_box
842  
1029    def prepareSteeringCards(self):
1030        """
1031        Make initial modifications of the user's steering card file.
1032        """
1033        return
843  
844      def wsRenameOutput(self, nj):
845          """
# Line 1040 | Line 849 | class Cmssw(JobType):
849          txt = '\n#Written by cms_cmssw::wsRenameOutput\n'
850          txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
851          txt += 'echo ">>> current directory content:"\n'
852 <        if self.debug_wrapper:
852 >        if self.debug_wrapper==1:
853              txt += 'ls -Al\n'
854          txt += '\n'
855  
856          for fileWithSuffix in (self.output_file):
857 <            output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
857 >            output_file_num = numberFile(fileWithSuffix, '$NJob')
858              txt += '\n'
859              txt += '# check output file\n'
860              txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n'
861              if (self.copy_data == 1):  # For OSG nodes, file is in $WORKING_DIR, should not be moved to $RUNTIME_AREA
862                  txt += '    mv '+fileWithSuffix+' '+output_file_num+'\n'
863 <                #txt += '    ln -s `pwd`/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
863 >                txt += '    ln -s `pwd`/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
864              else:
865                  txt += '    mv '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
866                  txt += '    ln -s $RUNTIME_AREA/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
# Line 1066 | Line 875 | class Cmssw(JobType):
875              txt += 'fi\n'
876          file_list = []
877          for fileWithSuffix in (self.output_file):
878 <             file_list.append(self.numberFile_(fileWithSuffix, '$NJob'))
878 >             file_list.append(numberFile('$SOFTWARE_DIR/'+fileWithSuffix, '$NJob'))
879  
880 <        txt += 'file_list="'+string.join(file_list,' ')+'"\n'
880 >        txt += 'file_list="'+string.join(file_list,',')+'"\n'
881          txt += '\n'
882          txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
883          txt += 'echo ">>> current directory content:"\n'
884 <        if self.debug_wrapper:
884 >        if self.debug_wrapper==1:
885              txt += 'ls -Al\n'
886          txt += '\n'
887          txt += 'cd $RUNTIME_AREA\n'
888          txt += 'echo ">>> current directory (RUNTIME_AREA):  $RUNTIME_AREA"\n'
889          return txt
890  
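Illustrative example (the output file name is an assumption): with output_file = ['histo.root'], the loop above renames histo.root to histo_$NJob.root on the worker node and the method emits

# file_list="$SOFTWARE_DIR/histo_$NJob.root"
# i.e. a comma-separated list of absolute paths on the worker node, one entry per declared output.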
1082    def numberFile_(self, file, txt):
1083        """
1084        append _'txt' before last extension of a file
1085        """
1086        p = string.split(file,".")
1087        # take away last extension
1088        name = p[0]
1089        for x in p[1:-1]:
1090            name=name+"."+x
1091        # add "_txt"
1092        if len(p)>1:
1093            ext = p[len(p)-1]
1094            result = name + '_' + txt + "." + ext
1095        else:
1096            result = name + '_' + txt
1097
1098        return result
1099
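The helper above was dropped from this class; the call sites now use a bare numberFile(), presumably the version pulled in from crab_util via the wildcard import. A runnable sketch of the same naming rule (name invented for clarity):

def numberFile_sketch(filename, tag):
    # Insert "_<tag>" before the last extension, or append it if there is none.
    parts = filename.split(".")
    if len(parts) > 1:
        return ".".join(parts[:-1]) + "_" + tag + "." + parts[-1]
    return filename + "_" + tag

# numberFile_sketch('histo.root', '3')      -> 'histo_3.root'
# numberFile_sketch('out.tar.gz', '$NJob')  -> 'out.tar_$NJob.gz'
# numberFile_sketch('summary', '7')         -> 'summary_7'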
891      def getRequirements(self, nj=[]):
892          """
893          return job requirements to add to jdl files
# Line 1112 | Line 903 | class Cmssw(JobType):
903                   '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
904  
905          req = req + ' && (other.GlueHostNetworkAdapterOutboundIP)'
906 <        if common.scheduler.name() == "glitecoll":
906 >        if ( common.scheduler.name() == "glitecoll" ) or ( common.scheduler.name() == "glite"):
907              req += ' && other.GlueCEStateStatus == "Production" '
908  
909          return req
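Illustrative rendering of the requirements string: only the clauses visible in this hunk are certain; the leading software-tag Member(...) clause and the release string below are assumptions about the part of the method elided here.

# Hypothetical JDL requirement for a glite/glitecoll job:
#   Member("VO-cms-CMSSW_2_2_3", other.GlueHostApplicationSoftwareRunTimeEnvironment)
#   && (other.GlueHostNetworkAdapterOutboundIP)
#   && other.GlueCEStateStatus == "Production"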
# Line 1183 | Line 974 | class Cmssw(JobType):
974          txt += '    echo "==> setup cms environment ok"\n'
975          return txt
976  
977 <    def modifyReport(self, nj):
977 >    def wsModifyReport(self, nj):
978          """
979          insert the part of the script that modifies the FrameworkJob Report
980          """
981 <        txt = '\n#Written by cms_cmssw::modifyReport\n'
981 >
982 >        txt = ''
983          publish_data = int(self.cfg_params.get('USER.publish_data',0))
984 <        if (publish_data == 1):
985 <            processedDataset = self.cfg_params['USER.publish_data_name']
986 <            LFNBaseName = LFNBase(processedDataset)
984 >        #if (publish_data == 1):
985 >        if (self.copy_data == 1):
986 >            txt = '\n#Written by cms_cmssw::wsModifyReport\n'
987 >            publish_data = int(self.cfg_params.get('USER.publish_data',0))
988  
989 <            txt += 'if [ $copy_exit_status -eq 0 ]; then\n'
990 <            txt += '    FOR_LFN=%s_${PSETHASH}/\n'%(LFNBaseName)
989 >
990 >            txt += 'if [ $StageOutExitStatus -eq 0 ]; then\n'
991 >            txt += '    FOR_LFN=$LFNBaseName\n'
992              txt += 'else\n'
993              txt += '    FOR_LFN=/copy_problems/ \n'
1200            txt += '    SE=""\n'
1201            txt += '    SE_PATH=""\n'
994              txt += 'fi\n'
995  
996              txt += 'echo ">>> Modify Job Report:" \n'
997 <            txt += 'chmod a+x $SOFTWARE_DIR/ProdCommon/ProdCommon/FwkJobRep/ModifyJobReport.py\n'
1206 <            txt += 'ProcessedDataset='+processedDataset+'\n'
1207 <            txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
997 >            txt += 'chmod a+x $RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py\n'
998              txt += 'echo "SE = $SE"\n'
999              txt += 'echo "SE_PATH = $SE_PATH"\n'
1000              txt += 'echo "FOR_LFN = $FOR_LFN" \n'
1001              txt += 'echo "CMSSW_VERSION = $CMSSW_VERSION"\n\n'
1002 <            txt += 'echo "$SOFTWARE_DIR/ProdCommon/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
1003 <            txt += '$SOFTWARE_DIR/ProdCommon/ProdCommon/FwkJobRep/ModifyJobReport.py $RUNTIME_AREA/crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
1002 >
1003 >
1004 >            args = 'fjr $RUNTIME_AREA/crab_fjr_$NJob.xml n_job $NJob for_lfn $FOR_LFN PrimaryDataset $PrimaryDataset  ApplicationFamily $ApplicationFamily ApplicationName $executable cmssw_version $CMSSW_VERSION psethash $PSETHASH se_name $SE se_path $SE_PATH'
1005 >            if (publish_data == 1):
1006 >                processedDataset = self.cfg_params['USER.publish_data_name']
1007 >                txt += 'ProcessedDataset='+processedDataset+'\n'
1008 >                txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1009 >                args += ' UserProcessedDataset $USER-$ProcessedDataset-$PSETHASH'
1010 >
1011 >            txt += 'echo "$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py '+str(args)+'"\n'
1012 >            txt += '$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py '+str(args)+'\n'
1013              txt += 'modifyReport_result=$?\n'
1014              txt += 'if [ $modifyReport_result -ne 0 ]; then\n'
1015              txt += '    modifyReport_result=70500\n'
# Line 1231 | Line 1030 | class Cmssw(JobType):
1030          txt += 'if [ -s $RUNTIME_AREA/crab_fjr_$NJob.xml ]; then\n'
1031          txt += '    if [ -s $RUNTIME_AREA/parseCrabFjr.py ]; then\n'
1032          txt += '        cmd_out=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --dashboard $MonitorID,$MonitorJobID '+self.debugWrap+'`\n'
1033 <        if self.debug_wrapper :
1033 >        if self.debug_wrapper==1 :
1034              txt += '        echo "Result of parsing the FrameworkJobReport crab_fjr.xml: $cmd_out"\n'
1035          txt += '        executable_exit_status=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --exitcode`\n'
1036          txt += '        if [ $executable_exit_status -eq 50115 ];then\n'
1037          txt += '            echo ">>> crab_fjr.xml contents: "\n'
1038 <        txt += '            cat $RUNTIME_AREA/crab_fjr_NJob.xml\n'
1038 >        txt += '            cat $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
1039          txt += '            echo "Wrong FrameworkJobReport --> does not contain useful info. ExitStatus: $executable_exit_status"\n'
1040          txt += '        elif [ $executable_exit_status -eq -999 ];then\n'
 1041          txt += '            echo "ExitStatus from FrameworkJobReport not available. Using exit code of executable from command line."\n'
# Line 1247 | Line 1046 | class Cmssw(JobType):
1046          txt += '        echo "CRAB python script to parse CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1047          txt += '    fi\n'
 1048            #### Patch to check input data reading for CMSSW 1_6_x. Hopefully we'll remove it ASAP
1049 <
1050 <        if self.datasetPath:
1049 >        txt += '    if [ $executable_exit_status -eq 0 ];then\n'
 1050 >        txt += '        echo ">>> Executable succeeded $executable_exit_status"\n'
1051 >        ## This cannot more work given the changes on the Job argumentsJob
1052 >        """
1053 >        if (self.datasetPath and not (self.dataset_pu or self.useParent==1)) :
1054            # VERIFY PROCESSED DATA
1055 <            txt += '    if [ $executable_exit_status -eq 0 ];then\n'
1056 <            txt += '      echo ">>> Verify list of processed files:"\n'
1057 <            txt += '      echo $InputFiles |tr -d \'\\\\\' |tr \',\' \'\\n\'|tr -d \'"\' > input-files.txt\n'
1058 <            txt += '      python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --lfn > processed-files.txt\n'
1059 <            txt += '      cat input-files.txt  | sort | uniq > tmp.txt\n'
1060 <            txt += '      mv tmp.txt input-files.txt\n'
1061 <            txt += '      echo "cat input-files.txt"\n'
1062 <            txt += '      echo "----------------------"\n'
1063 <            txt += '      cat input-files.txt\n'
1064 <            txt += '      cat processed-files.txt | sort | uniq > tmp.txt\n'
1065 <            txt += '      mv tmp.txt processed-files.txt\n'
1066 <            txt += '      echo "----------------------"\n'
1067 <            txt += '      echo "cat processed-files.txt"\n'
1068 <            txt += '      echo "----------------------"\n'
1069 <            txt += '      cat processed-files.txt\n'
1070 <            txt += '      echo "----------------------"\n'
1071 <            txt += '      diff -q input-files.txt processed-files.txt\n'
1072 <            txt += '      fileverify_status=$?\n'
1073 <            txt += '      if [ $fileverify_status -ne 0 ]; then\n'
1074 <            txt += '         executable_exit_status=30001\n'
1075 <            txt += '         echo "ERROR ==> not all input files processed"\n'
1076 <            txt += '         echo "      ==> list of processed files from crab_fjr.xml differs from list in pset.cfg"\n'
1077 <            txt += '         echo "      ==> diff input-files.txt processed-files.txt"\n'
1078 <            txt += '      fi\n'
1079 <            txt += '    fi\n'
1278 <            txt += '\n'
1055 >            txt += '        echo ">>> Verify list of processed files:"\n'
1056 >            txt += '        echo $InputFiles |tr -d \'\\\\\' |tr \',\' \'\\n\'|tr -d \'"\' > input-files.txt\n'
1057 >            txt += '        python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --lfn > processed-files.txt\n'
1058 >            txt += '        cat input-files.txt  | sort | uniq > tmp.txt\n'
1059 >            txt += '        mv tmp.txt input-files.txt\n'
1060 >            txt += '        echo "cat input-files.txt"\n'
1061 >            txt += '        echo "----------------------"\n'
1062 >            txt += '        cat input-files.txt\n'
1063 >            txt += '        cat processed-files.txt | sort | uniq > tmp.txt\n'
1064 >            txt += '        mv tmp.txt processed-files.txt\n'
1065 >            txt += '        echo "----------------------"\n'
1066 >            txt += '        echo "cat processed-files.txt"\n'
1067 >            txt += '        echo "----------------------"\n'
1068 >            txt += '        cat processed-files.txt\n'
1069 >            txt += '        echo "----------------------"\n'
1070 >            txt += '        diff -qbB input-files.txt processed-files.txt\n'
1071 >            txt += '        fileverify_status=$?\n'
1072 >            txt += '        if [ $fileverify_status -ne 0 ]; then\n'
1073 >            txt += '            executable_exit_status=30001\n'
1074 >            txt += '            echo "ERROR ==> not all input files processed"\n'
1075 >            txt += '            echo "      ==> list of processed files from crab_fjr.xml differs from list in pset.cfg"\n'
1076 >            txt += '            echo "      ==> diff input-files.txt processed-files.txt"\n'
1077 >            txt += '        fi\n'
1078 >        """
1079 >        txt += '    fi\n'
1080          txt += 'else\n'
1081          txt += '    echo "CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1082          txt += 'fi\n'
1083          txt += '\n'
1084 +        txt += 'if [ $executable_exit_status -ne 0 ] && [ $executable_exit_status -ne 50115 ] && [ $executable_exit_status -ne 50117 ] && [ $executable_exit_status -ne 30001 ];then\n'
1085 +        txt += '    echo ">>> Executable failed  $executable_exit_status"\n'
1086 +        txt += '    echo "ExeExitCode=$executable_exit_status" | tee -a $RUNTIME_AREA/$repo\n'
1087 +        txt += '    echo "EXECUTABLE_EXIT_STATUS = $executable_exit_status"\n'
1088 +        txt += '    job_exit_code=$executable_exit_status\n'
1089 +        txt += '    func_exit\n'
1090 +        txt += 'fi\n\n'
1091          txt += 'echo "ExeExitCode=$executable_exit_status" | tee -a $RUNTIME_AREA/$repo\n'
1092          txt += 'echo "EXECUTABLE_EXIT_STATUS = $executable_exit_status"\n'
1093          txt += 'job_exit_code=$executable_exit_status\n'
# Line 1292 | Line 1100 | class Cmssw(JobType):
1100      def getParams(self):
1101          return self._params
1102  
1103 <    def uniquelist(self, old):
1296 <        """
1297 <        remove duplicates from a list
1298 <        """
1299 <        nd={}
1300 <        for e in old:
1301 <            nd[e]=0
1302 <        return nd.keys()
1303 <
1304 <    def outList(self):
1103 >    def outList(self,list=False):
1104          """
 1105          build the list of expected output files (filesToCheck) whose size has to be checked
1106          """
# Line 1310 | Line 1109 | class Cmssw(JobType):
1109          listOutFiles = []
1110          stdout = 'CMSSW_$NJob.stdout'
1111          stderr = 'CMSSW_$NJob.stderr'
1112 +        if len(self.output_file) <= 0:
 1113 +            msg = "WARNING: no output file names have been defined!!\n"
1114 +            msg+="\tno output files will be reported back/staged\n"
1115 +            common.logger.info(msg)
1116          if (self.return_data == 1):
1117              for file in (self.output_file+self.output_file_sandbox):
1118 <                listOutFiles.append(self.numberFile_(file, '$NJob'))
1118 >                listOutFiles.append(numberFile(file, '$NJob'))
1119              listOutFiles.append(stdout)
1120              listOutFiles.append(stderr)
1121          else:
1122              for file in (self.output_file_sandbox):
1123 <                listOutFiles.append(self.numberFile_(file, '$NJob'))
1123 >                listOutFiles.append(numberFile(file, '$NJob'))
1124              listOutFiles.append(stdout)
1125              listOutFiles.append(stderr)
1126          txt += 'echo "output files: '+string.join(listOutFiles,' ')+'"\n'
1127          txt += 'filesToCheck="'+string.join(listOutFiles,' ')+'"\n'
1128          txt += 'export filesToCheck\n'
1129 +
1130 +        if list : return self.output_file
1131          return txt
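Illustrative example (the output name is an assumption): with return_data enabled and output_file = ['histo.root'], the wrapper lines produced above reduce to

listOutFiles = ['histo_$NJob.root', 'CMSSW_$NJob.stdout', 'CMSSW_$NJob.stderr']
print 'echo "output files: ' + ' '.join(listOutFiles) + '"'
print 'filesToCheck="' + ' '.join(listOutFiles) + '"'
# echo "output files: histo_$NJob.root CMSSW_$NJob.stdout CMSSW_$NJob.stderr"
# filesToCheck="histo_$NJob.root CMSSW_$NJob.stdout CMSSW_$NJob.stderr"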
