
Comparing COMP/CRAB/python/cms_cmssw.py (file contents):
Revision 1.155 by slacapra, Tue Feb 12 15:20:47 2008 UTC vs.
Revision 1.300 by spiga, Wed May 20 13:51:45 2009 UTC

# Line 2 | Line 2 | from JobType import JobType
2   from crab_logger import Logger
3   from crab_exceptions import *
4   from crab_util import *
5 from BlackWhiteListParser import BlackWhiteListParser
5   import common
6   import Scram
7 + from Splitter import JobSplitter
8  
9 + from IMProv.IMProvNode import IMProvNode
10   import os, string, glob
11  
12   class Cmssw(JobType):
13 <    def __init__(self, cfg_params, ncjobs):
13 >    def __init__(self, cfg_params, ncjobs,skip_blocks, isNew):
14          JobType.__init__(self, 'CMSSW')
15          common.logger.debug(3,'CMSSW::__init__')
16 <
17 <        self.argsList = []
16 >        self.skip_blocks = skip_blocks
17 >        self.argsList = 1
18  
19          self._params = {}
20          self.cfg_params = cfg_params
20        # init BlackWhiteListParser
21        self.blackWhiteListParser = BlackWhiteListParser(cfg_params)
21  
22 <        self.MaxTarBallSize = float(self.cfg_params.get('EDG.maxtarballsize',9.5))
22 >        ### Temporary patch to automatically skip the ISB size check:
23 >        server=self.cfg_params.get('CRAB.server_name',None)
24 >        size = 9.5
25 >        if server or common.scheduler.name().upper() in ['LSF','CAF']: size = 99999
26 >        ### D.S.
27 >        self.MaxTarBallSize = float(self.cfg_params.get('EDG.maxtarballsize',size))
28  
29          # number of jobs requested to be created, limit obj splitting
30          self.ncjobs = ncjobs
# Line 32 | Line 36 | class Cmssw(JobType):
36          self.scriptExe = ''
37          self.executable = ''
38          self.executable_arch = self.scram.getArch()
39 <        self.tgz_name = 'default.tgz'
40 <        self.additional_tgz_name = 'additional.tgz'
39 >        self.tgz_name = 'default.tar.gz'
40 >        self.tar_name = 'default.tar'
41          self.scriptName = 'CMSSW.sh'
42 <        self.pset = ''      #scrip use case Da
43 <        self.datasetPath = '' #scrip use case Da
42 >        self.pset = ''
43 >        self.datasetPath = ''
44  
45 +        self.tgzNameWithPath = common.work_space.pathForTgz()+self.tgz_name
46          # set FJR file name
47          self.fjrFileName = 'crab_fjr.xml'
48  
49          self.version = self.scram.getSWVersion()
50 +        common.logger.write("CMSSW version is: "+str(self.version))
51 +        try:
52 +            type, self.CMSSW_major, self.CMSSW_minor, self.CMSSW_patch = tuple(self.version.split('_'))
53 +        except:
54 +            msg = "Cannot parse CMSSW version string: " + self.version + " for major and minor release number!"
55 +            raise CrabException(msg)
56  
57 <        #
58 <        # Try to block creation in case of arch/version mismatch
59 <        #
57 >        if self.CMSSW_major < 1 or (self.CMSSW_major == 1 and self.CMSSW_minor < 5):
58 >            msg = "CRAB supports CMSSW >= 1_5_x only. Use an older CRAB version."
59 >            raise CrabException(msg)
60 >            """
61 >            As CMSSW versions are dropped we can drop more code:
62 >            1.X dropped: drop support for running .cfg on WN
63 >            2.0 dropped: drop all support for cfg here and in writeCfg
64 >            2.0 dropped: Recheck the random number seed support
65 >            """
66  
67 <        a = string.split(self.version, "_")
67 >        ### collect Data cards
68  
52        if int(a[1]) == 1 and (int(a[2]) < 5 and self.executable_arch.find('slc4') == 0):
53            msg = "Warning: You are using %s version of CMSSW  with %s architecture. \n--> Did you compile your libraries with SLC3? Otherwise you can find some problems running on SLC4 Grid nodes.\n"%(self.version, self.executable_arch)
54            common.logger.message(msg)
55        if int(a[1]) == 1 and (int(a[2]) >= 5 and self.executable_arch.find('slc3') == 0):
56            msg = "Error: CMS does not support %s with %s architecture"%(self.version, self.executable_arch)
57            raise CrabException(msg)
69  
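The new revision keeps the split version fields as strings, whereas the removed check cast them with int(); a minimal standalone sketch of the intended gate, assuming the usual CMSSW_<major>_<minor>_<patch> naming (parse_cmssw_version and check_supported are illustrative names, not CRAB code), with an explicit int() conversion so the numeric comparison behaves as intended:

    def parse_cmssw_version(version):
        fields = version.split('_')
        if len(fields) < 4:
            raise ValueError("Cannot parse CMSSW version string: " + version)
        # fields[0] is the literal 'CMSSW' prefix; the patch field may be non-numeric
        return int(fields[1]), int(fields[2]), fields[3]

    def check_supported(version, minimum=(1, 5)):
        major, minor, patch = parse_cmssw_version(version)
        if (major, minor) < minimum:
            raise ValueError("CRAB supports CMSSW >= 1_5_x only, got " + version)

    check_supported("CMSSW_2_2_13")   # passes silently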
70 <        common.taskDB.setDict('codeVersion',self.version)
71 <        self.setParam_('application', self.version)
70 >        ### Temporary: added to remove input file control in the case of PU
71 >        self.dataset_pu = cfg_params.get('CMSSW.dataset_pu', None)
72  
73 <        ### collect Data cards
73 >        tmp =  cfg_params['CMSSW.datasetpath']
74 >        log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
75  
76 <        if not cfg_params.has_key('CMSSW.datasetpath'):
76 >        if tmp =='':
77              msg = "Error: datasetpath not defined "
78              raise CrabException(msg)
79 <        tmp =  cfg_params['CMSSW.datasetpath']
68 <        log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
69 <        if string.lower(tmp)=='none':
79 >        elif string.lower(tmp)=='none':
80              self.datasetPath = None
81              self.selectNoInput = 1
82          else:
83              self.datasetPath = tmp
84              self.selectNoInput = 0
85  
76        # ML monitoring
77        # split dataset path style: /PreProdR3Minbias/SIM/GEN-SIM
78        if not self.datasetPath:
79            self.setParam_('dataset', 'None')
80            self.setParam_('owner', 'None')
81        else:
82            ## SL what is supposed to fail here?
83            try:
84                datasetpath_split = self.datasetPath.split("/")
85                # standard style
86                self.setParam_('datasetFull', self.datasetPath)
87                self.setParam_('dataset', datasetpath_split[1])
88                self.setParam_('owner', datasetpath_split[2])
89            except:
90                self.setParam_('dataset', self.datasetPath)
91                self.setParam_('owner', self.datasetPath)
92
93        self.setParam_('taskId', common.taskDB.dict('taskId'))
94
86          self.dataTiers = []
87  
88 +        self.debugWrap=''
89 +        self.debug_wrapper = int(cfg_params.get('USER.debug_wrapper',0))
90 +        if self.debug_wrapper == 1: self.debugWrap='--debug'
91 +
92          ## now the application
93 +        self.managedGenerators = ['madgraph','comphep']
94 +        self.generator = cfg_params.get('CMSSW.generator','pythia').lower()
95          self.executable = cfg_params.get('CMSSW.executable','cmsRun')
99        self.setParam_('exe', self.executable)
96          log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
97  
98          if not cfg_params.has_key('CMSSW.pset'):
# Line 117 | Line 113 | class Cmssw(JobType):
113          self.output_file_sandbox.append(self.fjrFileName)
114  
115          # other output files to be returned via sandbox or copied to SE
116 +        outfileflag = False
117          self.output_file = []
118          tmp = cfg_params.get('CMSSW.output_file',None)
119          if tmp :
120 <            tmpOutFiles = string.split(tmp,',')
121 <            log.debug(7, 'cmssw::cmssw(): output files '+str(tmpOutFiles))
122 <            for tmp in tmpOutFiles:
123 <                tmp=string.strip(tmp)
127 <                self.output_file.append(tmp)
128 <                pass
129 <        else:
130 <            log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
131 <        pass
120 >            self.output_file = [x.strip() for x in tmp.split(',')]
121 >            outfileflag = True #output found
122 >        #else:
123 >        #    log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
124  
125          # script_exe file as additional file in inputSandbox
126          self.scriptExe = cfg_params.get('USER.script_exe',None)
127          if self.scriptExe :
128 <           if not os.path.isfile(self.scriptExe):
129 <              msg ="ERROR. file "+self.scriptExe+" not found"
130 <              raise CrabException(msg)
131 <           self.additional_inbox_files.append(string.strip(self.scriptExe))
128 >            if not os.path.isfile(self.scriptExe):
129 >                msg ="ERROR. file "+self.scriptExe+" not found"
130 >                raise CrabException(msg)
131 >            self.additional_inbox_files.append(string.strip(self.scriptExe))
132  
141        #CarlosDaniele
133          if self.datasetPath == None and self.pset == None and self.scriptExe == '' :
134 <           msg ="Error. script_exe  not defined"
135 <           raise CrabException(msg)
134 >            msg ="Error. script_exe  not defined"
135 >            raise CrabException(msg)
136 >
137 >        # use parent files...
138 >        self.useParent = int(self.cfg_params.get('CMSSW.use_parent',0))
139  
140          ## additional input files
141          if cfg_params.has_key('USER.additional_input_files'):
# Line 161 | Line 155 | class Cmssw(JobType):
155                      if not os.path.exists(file):
156                          raise CrabException("Additional input file not found: "+file)
157                      pass
164                    # fname = string.split(file, '/')[-1]
165                    # storedFile = common.work_space.pathForTgz()+'share/'+fname
166                    # shutil.copyfile(file, storedFile)
158                      self.additional_inbox_files.append(string.strip(file))
159                  pass
160              pass
161              common.logger.debug(5,"Additional input files: "+str(self.additional_inbox_files))
162          pass
163  
173        ## Events per job
174        if cfg_params.has_key('CMSSW.events_per_job'):
175            self.eventsPerJob =int( cfg_params['CMSSW.events_per_job'])
176            self.selectEventsPerJob = 1
177        else:
178            self.eventsPerJob = -1
179            self.selectEventsPerJob = 0
180
181        ## number of jobs
182        if cfg_params.has_key('CMSSW.number_of_jobs'):
183            self.theNumberOfJobs =int( cfg_params['CMSSW.number_of_jobs'])
184            self.selectNumberOfJobs = 1
185        else:
186            self.theNumberOfJobs = 0
187            self.selectNumberOfJobs = 0
188
189        if cfg_params.has_key('CMSSW.total_number_of_events'):
190            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
191            self.selectTotalNumberEvents = 1
192        else:
193            self.total_number_of_events = 0
194            self.selectTotalNumberEvents = 0
195
196        if self.pset != None: #CarlosDaniele
197             if ( (self.selectTotalNumberEvents + self.selectEventsPerJob + self.selectNumberOfJobs) != 2 ):
198                 msg = 'Must define exactly two of total_number_of_events, events_per_job, or number_of_jobs.'
199                 raise CrabException(msg)
200        else:
201             if (self.selectNumberOfJobs == 0):
202                 msg = 'Must specify  number_of_jobs.'
203                 raise CrabException(msg)
204
205        ## source seed for pythia
206        self.sourceSeed = cfg_params.get('CMSSW.pythia_seed',None)
207
208        self.sourceSeedVtx = cfg_params.get('CMSSW.vtx_seed',None)
209
210        self.sourceSeedG4 = cfg_params.get('CMSSW.g4_seed',None)
164  
165 <        self.sourceSeedMix = cfg_params.get('CMSSW.mix_seed',None)
165 >        ## New method of dealing with seeds
166 >        self.incrementSeeds = []
167 >        self.preserveSeeds = []
168 >        if cfg_params.has_key('CMSSW.preserve_seeds'):
169 >            tmpList = cfg_params['CMSSW.preserve_seeds'].split(',')
170 >            for tmp in tmpList:
171 >                tmp.strip()
172 >                self.preserveSeeds.append(tmp)
173 >        if cfg_params.has_key('CMSSW.increment_seeds'):
174 >            tmpList = cfg_params['CMSSW.increment_seeds'].split(',')
175 >            for tmp in tmpList:
176 >                tmp.strip()
177 >                self.incrementSeeds.append(tmp)
178  
179          self.firstRun = cfg_params.get('CMSSW.first_run',None)
180  
216        if self.pset != None: #CarlosDaniele
217            import PsetManipulator as pp
218            PsetEdit = pp.PsetManipulator(self.pset) #Daniele Pset
219
181          # Copy/return
221
182          self.copy_data = int(cfg_params.get('USER.copy_data',0))
183          self.return_data = int(cfg_params.get('USER.return_data',0))
184  
185 +        self.conf = {}
186 +        self.conf['pubdata'] = None
187 +        # number of jobs requested to be created, limit obj splitting DD
188          #DBSDLS-start
189          ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
190          self.maxEvents=0  # max events available   ( --> check the requested nb. of evts in Creator.py)
# Line 233 | Line 196 | class Cmssw(JobType):
196          if self.datasetPath:
197              blockSites = self.DataDiscoveryAndLocation(cfg_params)
198          #DBSDLS-end
199 <
237 <        self.tgzNameWithPath = self.getTarBall(self.executable)
199 >        self.conf['blockSites']=blockSites
200  
201          ## Select Splitting
202 +        splitByRun = int(cfg_params.get('CMSSW.split_by_run',0))
203 +
204          if self.selectNoInput:
205 <            if self.pset == None: #CarlosDaniele
206 <                self.jobSplittingForScript()
205 >            if self.pset == None:
206 >                self.algo = 'ForScript'
207              else:
208 <                self.jobSplittingNoInput()
208 >                self.algo = 'NoInput'
209 >                self.conf['managedGenerators']=self.managedGenerators
210 >                self.conf['generator']=self.generator
211 >        elif splitByRun ==1:
212 >            self.algo = 'RunBased'
213          else:
214 <            self.jobSplittingByBlocks(blockSites)
214 >            self.algo = 'EventBased'
215 >
216 > #        self.algo = 'LumiBased'
217 >        splitter = JobSplitter(self.cfg_params,self.conf)
218 >        self.dict = splitter.Algos()[self.algo]()
219 >
220 >        self.argsFile= '%s/arguments.xml'%common.work_space.shareDir()
221 >        self.rootArgsFilename= 'arguments'
222 >        # modify Pset only the first time
223 >        if (isNew and self.pset != None): self.ModifyPset()
224 >
225 >        ## Prepare inputSandbox TarBall (only the first time)
226 >        self.tarNameWithPath = self.getTarBall(self.executable)
227 >
228 >
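splitter.Algos()[self.algo]() dispatches by name to one of the JobSplitter algorithms and returns the splitting dictionary consumed later in this class (njobs, args, params, jobDestination). A toy sketch of that pattern, with ToySplitter standing in for the real JobSplitter defined in Splitter.py:

    class ToySplitter:
        def __init__(self, cfg_params, conf):
            self.cfg_params = cfg_params
            self.conf = conf

        def jobSplittingByEvent(self):
            # the real algorithm builds per-job file lists, event counts and destinations
            return {'njobs': 2,
                    'args': [['fileA.root'], ['fileB.root']],
                    'params': ['InputFiles'],
                    'jobDestination': [[], []]}

        def Algos(self):
            # algorithm name -> bound method, mirroring splitter.Algos()[self.algo]()
            return {'EventBased': self.jobSplittingByEvent}

    splitter = ToySplitter({}, {})
    result = splitter.Algos()['EventBased']()   # dict with njobs, args, jobDestination, ...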
229 >    def ModifyPset(self):
230 >        import PsetManipulator as pp
231 >        PsetEdit = pp.PsetManipulator(self.pset)
232 >        try:
233 >            # Add FrameworkJobReport to parameter-set, set max events.
234 >            # Reset later for data jobs by writeCFG which does all modifications
235 >            PsetEdit.maxEvent(1)
236 >            PsetEdit.skipEvent(0)
237 >            PsetEdit.psetWriter(self.configFilename())
238 >            ## If present, add TFileService to output files
239 >            if not int(self.cfg_params.get('CMSSW.skip_TFileService_output',0)):
240 >                tfsOutput = PsetEdit.getTFileService()
241 >                if tfsOutput:
242 >                    if tfsOutput in self.output_file:
243 >                        common.logger.debug(5,"Output from TFileService "+tfsOutput+" already in output files")
244 >                    else:
245 >                        outfileflag = True #output found
246 >                        self.output_file.append(tfsOutput)
247 >                        common.logger.message("Adding "+tfsOutput+" (from TFileService) to list of output files")
248 >                    pass
249 >                pass
250 >            ## If present and requested, add PoolOutputModule to output files
251 >            if int(self.cfg_params.get('CMSSW.get_edm_output',0)):
252 >                edmOutput = PsetEdit.getPoolOutputModule()
253 >                if edmOutput:
254 >                    if edmOutput in self.output_file:
255 >                        common.logger.debug(5,"Output from PoolOutputModule "+edmOutput+" already in output files")
256 >                    else:
257 >                        self.output_file.append(edmOutput)
258 >                        common.logger.message("Adding "+edmOutput+" (from PoolOutputModule) to list of output files")
259 >                    pass
260 >                pass
261 >            # not required: check anyhow if present, to avoid accidental T2 overload
262 >            else:
263 >                edmOutput = PsetEdit.getPoolOutputModule()
264 >                if edmOutput and (edmOutput not in self.output_file):
265 >                    msg = "ERROR: a PoolOutputModule is present in your ParameterSet %s \n"%self.pset
266 >                    msg +="         but the file produced ( %s ) is not in the list of output files\n"%edmOutput
267 >                    msg += "WARNING: please remove it. If you want to keep it, add the file to output_files or use CMSSW.get_edm_output\n"
268 >                    raise CrabException(msg)
269 >                pass
270 >            pass
271 >        except CrabException, msg:
272 >            common.logger.message(str(msg))
273 >            msg='Error while manipulating ParameterSet (see previous message, if any): exiting...'
274 >            raise CrabException(msg)
275  
248        # modify Pset
249        if self.pset != None: #CarlosDaniele
250            try:
251                if (self.datasetPath): # standard job
252                    # allow to processa a fraction of events in a file
253                    PsetEdit.inputModule("INPUTFILE")
254                    PsetEdit.maxEvent(0)
255                    PsetEdit.skipEvent(0)
256                else:  # pythia like job
257                    PsetEdit.maxEvent(self.eventsPerJob)
258                    if (self.firstRun):
259                        PsetEdit.pythiaFirstRun(0)  #First Run
260                    if (self.sourceSeed) :
261                        PsetEdit.pythiaSeed(0)
262                        if (self.sourceSeedVtx) :
263                            PsetEdit.vtxSeed(0)
264                        if (self.sourceSeedG4) :
265                            PsetEdit.g4Seed(0)
266                        if (self.sourceSeedMix) :
267                            PsetEdit.mixSeed(0)
268                # add FrameworkJobReport to parameter-set
269                PsetEdit.addCrabFJR(self.fjrFileName)
270                PsetEdit.psetWriter(self.configFilename())
271            except:
272                msg='Error while manipuliating ParameterSet: exiting...'
273                raise CrabException(msg)
276  
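A condensed sketch of the output-file collection performed in ModifyPset above, with the PSet accessors replaced by plain callables; get_tfile_service and get_pool_output stand in for PsetEdit.getTFileService() and PsetEdit.getPoolOutputModule():

    def collect_outputs(output_file, cfg, get_tfile_service, get_pool_output):
        # add the TFileService file unless the user asked to skip it
        if not int(cfg.get('CMSSW.skip_TFileService_output', 0)):
            tfs = get_tfile_service()
            if tfs and tfs not in output_file:
                output_file.append(tfs)
        edm = get_pool_output()
        if int(cfg.get('CMSSW.get_edm_output', 0)):
            if edm and edm not in output_file:
                output_file.append(edm)
        elif edm and edm not in output_file:
            # mirrors the "avoid accidental T2 overload" check above
            raise RuntimeError("PoolOutputModule file %s not listed in output_files" % edm)
        return output_file

    collect_outputs([], {}, lambda: 'histo.root', lambda: None)   # -> ['histo.root']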
277      def DataDiscoveryAndLocation(self, cfg_params):
278  
# Line 283 | Line 285 | class Cmssw(JobType):
285          ## Contact the DBS
286          common.logger.message("Contacting Data Discovery Services ...")
287          try:
288 <            self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params)
288 >            self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params,self.skip_blocks)
289              self.pubdata.fetchDBSInfo()
290  
291          except DataDiscovery.NotExistingDatasetError, ex :
# Line 297 | Line 299 | class Cmssw(JobType):
299              raise CrabException(msg)
300  
301          self.filesbyblock=self.pubdata.getFiles()
302 <        self.eventsbyblock=self.pubdata.getEventsPerBlock()
303 <        self.eventsbyfile=self.pubdata.getEventsPerFile()
302 >        #print self.filesbyblock
303 >        self.conf['pubdata']=self.pubdata
304  
305          ## get max number of events
306 <        self.maxEvents=self.pubdata.getMaxEvents() ##  self.maxEvents used in Creator.py
306 >        self.maxEvents=self.pubdata.getMaxEvents()
307  
308          ## Contact the DLS and build a list of sites hosting the fileblocks
309          try:
310              dataloc=DataLocation.DataLocation(self.filesbyblock.keys(),cfg_params)
311              dataloc.fetchDLSInfo()
312 +
313          except DataLocation.DataLocationError , ex:
314              msg = 'ERROR ***: failed Data Location in DLS \n %s '%ex.getErrorMessage()
315              raise CrabException(msg)
316  
317  
318 <        sites = dataloc.getSites()
318 >        unsorted_sites = dataloc.getSites()
319 >        #print "Unsorted :",unsorted_sites
320 >        sites = self.filesbyblock.fromkeys(self.filesbyblock,'')
321 >        for lfn in self.filesbyblock.keys():
322 >            #print lfn
323 >            if unsorted_sites.has_key(lfn):
324 >                #print "Found ",lfn
325 >                sites[lfn]=unsorted_sites[lfn]
326 >            else:
327 >                #print "Not Found ",lfn
328 >                sites[lfn]=[]
329 >        #print sites
330 >
331 >        #print "Sorted :",sites
332 >        if len(sites)==0:
333 >            msg = 'ERROR ***: no location for any of the blocks of this dataset: \n\t %s \n'%datasetPath
334 >            msg += "\tMaybe the dataset is located only at T1's (or at T0), where analysis jobs are not allowed\n"
335 >            msg += "\tPlease check DataDiscovery page https://cmsweb.cern.ch/dbs_discovery/\n"
336 >            raise CrabException(msg)
337 >
338          allSites = []
339          listSites = sites.values()
340          for listSite in listSites:
341              for oneSite in listSite:
342                  allSites.append(oneSite)
343 <        allSites = self.uniquelist(allSites)
343 >        [allSites.append(it) for it in allSites if not allSites.count(it)]
344 >
345  
346          # screen output
347          common.logger.message("Requested dataset: " + datasetPath + " has " + str(self.maxEvents) + " events in " + str(len(self.filesbyblock.keys())) + " blocks.\n")
348  
349          return sites
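The new site lookup fills one entry per block, falling back to an empty list for blocks unknown to DLS; the list comprehension over allSites, however, leaves the list unchanged (every element already has a count of at least one), so real de-duplication needs a membership test as in this sketch (function, block and site names are illustrative):

    def sites_by_block(filesbyblock, dls_sites):
        sites = {}
        for block in filesbyblock:
            sites[block] = dls_sites.get(block, [])
        return sites

    def unique_sites(sites):
        seen = []
        for site_list in sites.values():
            for site in site_list:
                if site not in seen:
                    seen.append(site)
        return seen

    blocks = {'/A/B/RECO#1': ['f1.root'], '/A/B/RECO#2': ['f2.root']}
    dls    = {'/A/B/RECO#1': ['T2_IT_Bari', 'T2_DE_DESY']}
    sites  = sites_by_block(blocks, dls)    # second block -> []
    unique_sites(sites)                     # e.g. ['T2_IT_Bari', 'T2_DE_DESY']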
350  
328    def setArgsList(self, argsList):
329        self.argsList = argsList
351  
352 <    def jobSplittingByBlocks(self, blockSites):
332 <        """
333 <        Perform job splitting. Jobs run over an integer number of files
334 <        and no more than one block.
335 <        ARGUMENT: blockSites: dictionary with blocks as keys and list of host sites as values
336 <        REQUIRES: self.selectTotalNumberEvents, self.selectEventsPerJob, self.selectNumberofJobs,
337 <                  self.total_number_of_events, self.eventsPerJob, self.theNumberOfJobs,
338 <                  self.maxEvents, self.filesbyblock
339 <        SETS: self.jobDestination - Site destination(s) for each job (a list of lists)
340 <              self.total_number_of_jobs - Total # of jobs
341 <              self.list_of_args - File(s) job will run on (a list of lists)
342 <        """
343 <
344 <        # ---- Handle the possible job splitting configurations ---- #
345 <        if (self.selectTotalNumberEvents):
346 <            totalEventsRequested = self.total_number_of_events
347 <        if (self.selectEventsPerJob):
348 <            eventsPerJobRequested = self.eventsPerJob
349 <            if (self.selectNumberOfJobs):
350 <                totalEventsRequested = self.theNumberOfJobs * self.eventsPerJob
351 <
352 <        # If user requested all the events in the dataset
353 <        if (totalEventsRequested == -1):
354 <            eventsRemaining=self.maxEvents
355 <        # If user requested more events than are in the dataset
356 <        elif (totalEventsRequested > self.maxEvents):
357 <            eventsRemaining = self.maxEvents
358 <            common.logger.message("Requested "+str(self.total_number_of_events)+ " events, but only "+str(self.maxEvents)+" events are available.")
359 <        # If user requested less events than are in the dataset
360 <        else:
361 <            eventsRemaining = totalEventsRequested
362 <
363 <        # If user requested more events per job than are in the dataset
364 <        if (self.selectEventsPerJob and eventsPerJobRequested > self.maxEvents):
365 <            eventsPerJobRequested = self.maxEvents
366 <
367 <        # For user info at end
368 <        totalEventCount = 0
369 <
370 <        if (self.selectTotalNumberEvents and self.selectNumberOfJobs):
371 <            eventsPerJobRequested = int(eventsRemaining/self.theNumberOfJobs)
372 <
373 <        if (self.selectNumberOfJobs):
374 <            common.logger.message("May not create the exact number_of_jobs requested.")
375 <
376 <        if ( self.ncjobs == 'all' ) :
377 <            totalNumberOfJobs = 999999999
378 <        else :
379 <            totalNumberOfJobs = self.ncjobs
380 <
381 <
382 <        blocks = blockSites.keys()
383 <        blockCount = 0
384 <        # Backup variable in case self.maxEvents counted events in a non-included block
385 <        numBlocksInDataset = len(blocks)
386 <
387 <        jobCount = 0
388 <        list_of_lists = []
389 <
390 <        # list tracking which jobs are in which jobs belong to which block
391 <        jobsOfBlock = {}
392 <
393 <        # ---- Iterate over the blocks in the dataset until ---- #
394 <        # ---- we've met the requested total # of events    ---- #
395 <        while ( (eventsRemaining > 0) and (blockCount < numBlocksInDataset) and (jobCount < totalNumberOfJobs)):
396 <            block = blocks[blockCount]
397 <            blockCount += 1
398 <            if block not in jobsOfBlock.keys() :
399 <                jobsOfBlock[block] = []
400 <
401 <            if self.eventsbyblock.has_key(block) :
402 <                numEventsInBlock = self.eventsbyblock[block]
403 <                common.logger.debug(5,'Events in Block File '+str(numEventsInBlock))
404 <
405 <                files = self.filesbyblock[block]
406 <                numFilesInBlock = len(files)
407 <                if (numFilesInBlock <= 0):
408 <                    continue
409 <                fileCount = 0
410 <
411 <                # ---- New block => New job ---- #
412 <                parString = ""
413 <                # counter for number of events in files currently worked on
414 <                filesEventCount = 0
415 <                # flag if next while loop should touch new file
416 <                newFile = 1
417 <                # job event counter
418 <                jobSkipEventCount = 0
419 <
420 <                # ---- Iterate over the files in the block until we've met the requested ---- #
421 <                # ---- total # of events or we've gone over all the files in this block  ---- #
422 <                while ( (eventsRemaining > 0) and (fileCount < numFilesInBlock) and (jobCount < totalNumberOfJobs) ):
423 <                    file = files[fileCount]
424 <                    if newFile :
425 <                        try:
426 <                            numEventsInFile = self.eventsbyfile[file]
427 <                            common.logger.debug(6, "File "+str(file)+" has "+str(numEventsInFile)+" events")
428 <                            # increase filesEventCount
429 <                            filesEventCount += numEventsInFile
430 <                            # Add file to current job
431 <                            parString += '\\\"' + file + '\\\"\,'
432 <                            newFile = 0
433 <                        except KeyError:
434 <                            common.logger.message("File "+str(file)+" has unknown number of events: skipping")
435 <
436 <
437 <                    # if less events in file remain than eventsPerJobRequested
438 <                    if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested ) :
439 <                        # if last file in block
440 <                        if ( fileCount == numFilesInBlock-1 ) :
441 <                            # end job using last file, use remaining events in block
442 <                            # close job and touch new file
443 <                            fullString = parString[:-2]
444 <                            list_of_lists.append([fullString,str(-1),str(jobSkipEventCount)])
445 <                            common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(filesEventCount - jobSkipEventCount)+" events (last file in block).")
446 <                            self.jobDestination.append(blockSites[block])
447 <                            common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
448 <                            # fill jobs of block dictionary
449 <                            jobsOfBlock[block].append(jobCount+1)
450 <                            # reset counter
451 <                            jobCount = jobCount + 1
452 <                            totalEventCount = totalEventCount + filesEventCount - jobSkipEventCount
453 <                            eventsRemaining = eventsRemaining - filesEventCount + jobSkipEventCount
454 <                            jobSkipEventCount = 0
455 <                            # reset file
456 <                            parString = ""
457 <                            filesEventCount = 0
458 <                            newFile = 1
459 <                            fileCount += 1
460 <                        else :
461 <                            # go to next file
462 <                            newFile = 1
463 <                            fileCount += 1
464 <                    # if events in file equal to eventsPerJobRequested
465 <                    elif ( filesEventCount - jobSkipEventCount == eventsPerJobRequested ) :
466 <                        # close job and touch new file
467 <                        fullString = parString[:-2]
468 <                        list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
469 <                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
470 <                        self.jobDestination.append(blockSites[block])
471 <                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
472 <                        jobsOfBlock[block].append(jobCount+1)
473 <                        # reset counter
474 <                        jobCount = jobCount + 1
475 <                        totalEventCount = totalEventCount + eventsPerJobRequested
476 <                        eventsRemaining = eventsRemaining - eventsPerJobRequested
477 <                        jobSkipEventCount = 0
478 <                        # reset file
479 <                        parString = ""
480 <                        filesEventCount = 0
481 <                        newFile = 1
482 <                        fileCount += 1
483 <
484 <                    # if more events in file remain than eventsPerJobRequested
485 <                    else :
486 <                        # close job but don't touch new file
487 <                        fullString = parString[:-2]
488 <                        list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
489 <                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
490 <                        self.jobDestination.append(blockSites[block])
491 <                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
492 <                        jobsOfBlock[block].append(jobCount+1)
493 <                        # increase counter
494 <                        jobCount = jobCount + 1
495 <                        totalEventCount = totalEventCount + eventsPerJobRequested
496 <                        eventsRemaining = eventsRemaining - eventsPerJobRequested
497 <                        # calculate skip events for last file
498 <                        # use filesEventCount (contains several files), jobSkipEventCount and eventsPerJobRequest
499 <                        jobSkipEventCount = eventsPerJobRequested - (filesEventCount - jobSkipEventCount - self.eventsbyfile[file])
500 <                        # remove all but the last file
501 <                        filesEventCount = self.eventsbyfile[file]
502 <                        parString = ""
503 <                        parString += '\\\"' + file + '\\\"\,'
504 <                    pass # END if
505 <                pass # END while (iterate over files in the block)
506 <        pass # END while (iterate over blocks in the dataset)
507 <        self.ncjobs = self.total_number_of_jobs = jobCount
508 <        if (eventsRemaining > 0 and jobCount < totalNumberOfJobs ):
509 <            common.logger.message("Could not run on all requested events because some blocks not hosted at allowed sites.")
510 <        common.logger.message(str(jobCount)+" job(s) can run on "+str(totalEventCount)+" events.\n")
511 <
512 <        # screen output
513 <        screenOutput = "List of jobs and available destination sites:\n\n"
514 <
515 <        # keep trace of block with no sites to print a warning at the end
516 <        noSiteBlock = []
517 <        bloskNoSite = []
518 <
519 <        blockCounter = 0
520 <        for block in blocks:
521 <            if block in jobsOfBlock.keys() :
522 <                blockCounter += 1
523 <                screenOutput += "Block %5i: jobs %20s: sites: %s\n" % (blockCounter,spanRanges(jobsOfBlock[block]),','.join(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)))
524 <                if len(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)) == 0:
525 <                    noSiteBlock.append( spanRanges(jobsOfBlock[block]) )
526 <                    bloskNoSite.append( blockCounter )
527 <
528 <        common.logger.message(screenOutput)
529 <        if len(noSiteBlock) > 0 and len(bloskNoSite) > 0:
530 <            msg = 'WARNING: No sites are hosting any part of data for block:\n                '
531 <            virgola = ""
532 <            if len(bloskNoSite) > 1:
533 <                virgola = ","
534 <            for block in bloskNoSite:
535 <                msg += ' ' + str(block) + virgola
536 <            msg += '\n               Related jobs:\n                 '
537 <            virgola = ""
538 <            if len(noSiteBlock) > 1:
539 <                virgola = ","
540 <            for range_jobs in noSiteBlock:
541 <                msg += str(range_jobs) + virgola
542 <            msg += '\n               will not be submitted and this block of data can not be analyzed!\n'
543 <            if self.cfg_params.has_key('EDG.se_white_list'):
544 <                msg += 'WARNING: SE White List: '+self.cfg_params['EDG.se_white_list']+'\n'
545 <                msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
546 <                msg += 'Please check if the dataset is available at this site!)\n'
547 <            if self.cfg_params.has_key('EDG.ce_white_list'):
548 <                msg += 'WARNING: CE White List: '+self.cfg_params['EDG.ce_white_list']+'\n'
549 <                msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
550 <                msg += 'Please check if the dataset is available at this site!)\n'
551 <
552 <            common.logger.message(msg)
553 <
554 <        self.list_of_args = list_of_lists
555 <        return
556 <
557 <    def jobSplittingNoInput(self):
558 <        """
559 <        Perform job splitting based on number of event per job
560 <        """
561 <        common.logger.debug(5,'Splitting per events')
352 >    def split(self, jobParams,firstJobID):
353  
354 <        if (self.selectEventsPerJob):
355 <            common.logger.message('Required '+str(self.eventsPerJob)+' events per job ')
356 <        if (self.selectNumberOfJobs):
566 <            common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
567 <        if (self.selectTotalNumberEvents):
568 <            common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
354 >        jobParams = self.dict['args']
355 >        njobs = self.dict['njobs']
356 >        self.jobDestination = self.dict['jobDestination']
357  
358 <        if (self.total_number_of_events < 0):
359 <            msg='Cannot split jobs per Events with "-1" as total number of events'
572 <            raise CrabException(msg)
358 >        if njobs==0:
359 >            raise CrabException("Ask to split "+str(njobs)+" jobs: aborting")
360  
361 <        if (self.selectEventsPerJob):
362 <            if (self.selectTotalNumberEvents):
363 <                self.total_number_of_jobs = int(self.total_number_of_events/self.eventsPerJob)
577 <            elif(self.selectNumberOfJobs) :
578 <                self.total_number_of_jobs =self.theNumberOfJobs
579 <                self.total_number_of_events =int(self.theNumberOfJobs*self.eventsPerJob)
580 <
581 <        elif (self.selectNumberOfJobs) :
582 <            self.total_number_of_jobs = self.theNumberOfJobs
583 <            self.eventsPerJob = int(self.total_number_of_events/self.total_number_of_jobs)
584 <
585 <        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
586 <
587 <        # is there any remainder?
588 <        check = int(self.total_number_of_events) - (int(self.total_number_of_jobs)*self.eventsPerJob)
589 <
590 <        common.logger.debug(5,'Check  '+str(check))
591 <
592 <        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created, each for '+str(self.eventsPerJob)+' for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
593 <        if check > 0:
594 <            common.logger.message('Warning: asked '+str(self.total_number_of_events)+' but can do only '+str(int(self.total_number_of_jobs)*self.eventsPerJob))
595 <
596 <        # argument is seed number.$i
597 <        self.list_of_args = []
598 <        for i in range(self.total_number_of_jobs):
599 <            ## Since there is no input, any site is good
600 <            self.jobDestination.append([""]) #must be empty to write correctly the xml
601 <            args=[]
602 <            if (self.firstRun):
603 <                ## pythia first run
604 <                args.append(str(self.firstRun)+str(i))
605 <            if (self.sourceSeed):
606 <                args.append(str(self.sourceSeed)+str(i))
607 <                if (self.sourceSeedVtx):
608 <                    ## + vtx random seed
609 <                    args.append(str(self.sourceSeedVtx)+str(i))
610 <                if (self.sourceSeedG4):
611 <                    ## + G4 random seed
612 <                    args.append(str(self.sourceSeedG4)+str(i))
613 <                if (self.sourceSeedMix):
614 <                    ## + Mix random seed
615 <                    args.append(str(self.sourceSeedMix)+str(i))
616 <                pass
617 <            pass
618 <            self.list_of_args.append(args)
619 <        pass
361 >        # create the empty structure
362 >        for i in range(njobs):
363 >            jobParams.append("")
364  
365 +        listID=[]
366 +        listField=[]
367 +        listDictions=[]
368 +        exist= os.path.exists(self.argsFile)
369 +        for id in range(njobs):
370 +            job = id + int(firstJobID)
371 +            listID.append(job+1)
372 +            job_ToSave ={}
373 +            concString = ' '
374 +            argu=''
375 +            str_argu = str(job+1)
376 +            if len(jobParams[id]):
377 +                argu = {'JobID': job+1}
378 +                for i in range(len(jobParams[id])):
379 +                    argu[self.dict['params'][i]]=jobParams[id][i]
380 +                # just for debug
381 +                str_argu += concString.join(jobParams[id])
382 +            listDictions.append(argu)
383 +            job_ToSave['arguments']= str(job+1)
384 +            job_ToSave['dlsDestination']= self.jobDestination[id]
385 +            listField.append(job_ToSave)
386 +            msg="Job  %s  Arguments:  %s\n"%(str(job+1),str_argu)
387 +            msg+="\t  Destination: %s "%(str(self.jobDestination[id]))
388 +            common.logger.debug(5,msg)
389 +        # write xml
390 +        if len(listDictions):
391 +            if exist==False: self.CreateXML()
392 +            self.addEntry(listDictions)
393 +            self.addXMLfile()
394 +        common._db.updateJob_(listID,listField)
395 +        self.zipTarFile()
396          return
397 +      
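split() pairs each entry of self.dict['args'] with the names listed in self.dict['params'] to build one argument dictionary per job before saving them; a minimal sketch of that pairing, with the parameter names chosen only for illustration:

    def build_job_dicts(params, args, first_job_id=0):
        job_dicts = []
        for offset, values in enumerate(args):
            entry = {'JobID': first_job_id + offset + 1}
            for name, value in zip(params, values):
                entry[name] = value
            job_dicts.append(entry)
        return job_dicts

    build_job_dicts(['InputFiles', 'MaxEvents', 'SkipEvents'],
                    [['fileA.root', '100', '0'], ['fileB.root', '100', '0']])
    # -> [{'JobID': 1, 'InputFiles': 'fileA.root', ...}, {'JobID': 2, ...}]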
398 +    def addXMLfile(self):
399  
400 +        import tarfile
401 +       # try:
402 +        print self.argsFile
403 +        tar = tarfile.open(self.tarNameWithPath, "a")
404 +        tar.add(self.argsFile, os.path.basename(self.argsFile))
405 +        tar.close()
406 +       ## except:
407 +       #     pass
408  
409 <    def jobSplittingForScript(self):#CarlosDaniele
409 >  
410 >    def CreateXML(self):
411          """
626        Perform job splitting based on number of job
412          """
413 <        common.logger.debug(5,'Splitting per job')
414 <        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
630 <
631 <        self.total_number_of_jobs = self.theNumberOfJobs
632 <
633 <        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
634 <
635 <        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created')
636 <
637 <        # argument is seed number.$i
638 <        self.list_of_args = []
639 <        for i in range(self.total_number_of_jobs):
640 <            ## Since there is no input, any site is good
641 <           # self.jobDestination.append(["Any"])
642 <            self.jobDestination.append([""])
643 <            ## no random seed
644 <            self.list_of_args.append([str(i)])
413 >        result = IMProvNode( self.rootArgsFilename )
414 >        outfile = file( self.argsFile, 'w').write(str(result))
415          return
416  
417 <    def split(self, jobParams):
418 <
419 <        common.jobDB.load()
650 <        #### Fabio
651 <        njobs = self.total_number_of_jobs
652 <        arglist = self.list_of_args
653 <        # create the empty structure
654 <        for i in range(njobs):
655 <            jobParams.append("")
656 <
657 <        for job in range(njobs):
658 <            jobParams[job] = arglist[job]
659 <            # print str(arglist[job])
660 <            # print jobParams[job]
661 <            common.jobDB.setArguments(job, jobParams[job])
662 <            common.logger.debug(5,"Job "+str(job)+" Destination: "+str(self.jobDestination[job]))
663 <            common.jobDB.setDestination(job, self.jobDestination[job])
417 >    def addEntry(self, listDictions):
418 >        """
419 >        _addEntry_
420  
421 <        common.jobDB.save()
421 >        add an entry to the xml file
422 >        """
423 >        from IMProv.IMProvLoader import loadIMProvFile
424 >        ## load xml
425 >        improvDoc = loadIMProvFile(self.argsFile)
426 >        entrname= 'Job'
427 >        for dictions in listDictions:
428 >           report = IMProvNode(entrname , None, **dictions)
429 >           improvDoc.addNode(report)
430 >        outfile = file( self.argsFile, 'w').write(str(improvDoc))
431          return
432  
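CreateXML and addEntry keep the per-job arguments in the share/arguments.xml file through the IMProv helpers (IMProvNode, loadIMProvFile) that ship with ProdCommon and are not shown in this file; a rough equivalent of the same create-then-append round trip, using xml.dom.minidom as a stand-in:

    import xml.dom.minidom as minidom

    def create_args_file(path, root_name='arguments'):
        doc = minidom.Document()
        doc.appendChild(doc.createElement(root_name))
        open(path, 'w').write(doc.toxml())

    def add_entries(path, job_dicts, entry_name='Job'):
        doc = minidom.parse(path)
        root = doc.documentElement
        for job in job_dicts:
            node = doc.createElement(entry_name)
            for key, value in job.items():
                node.setAttribute(key, str(value))
            root.appendChild(node)
        open(path, 'w').write(doc.toxml())

    create_args_file('/tmp/arguments.xml')
    add_entries('/tmp/arguments.xml', [{'JobID': 1, 'MaxEvents': 100, 'SkipEvents': 0}])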
668    def getJobTypeArguments(self, nj, sched):
669        result = ''
670        for i in common.jobDB.arguments(nj):
671            result=result+str(i)+" "
672        return result
673
433      def numberOfJobs(self):
434 <        # Fabio
676 <        return self.total_number_of_jobs
434 >        return self.dict['njobs']
435  
436      def getTarBall(self, exe):
437          """
438          Return the TarBall with lib and exe
439          """
440 <
441 <        # if it exist, just return it
442 <        #
685 <        # Marco. Let's start to use relative path for Boss XML files
686 <        #
687 <        self.tgzNameWithPath = common.work_space.pathForTgz()+'share/'+self.tgz_name
688 <        if os.path.exists(self.tgzNameWithPath):
689 <            return self.tgzNameWithPath
440 >        self.tarNameWithPath = common.work_space.pathForTgz()+self.tar_name
441 >        if os.path.exists(self.tarNameWithPath):
442 >            return self.tarNameWithPath
443  
444          # Prepare a tar gzipped file with user binaries.
445          self.buildTar_(exe)
446  
447 <        return string.strip(self.tgzNameWithPath)
447 >        return string.strip(self.tarNameWithPath)
448  
449      def buildTar_(self, executable):
450  
451          # First of all declare the user Scram area
452          swArea = self.scram.getSWArea_()
700        #print "swArea = ", swArea
701        # swVersion = self.scram.getSWVersion()
702        # print "swVersion = ", swVersion
453          swReleaseTop = self.scram.getReleaseTop_()
704        #print "swReleaseTop = ", swReleaseTop
454  
455          ## check if working area is release top
456          if swReleaseTop == '' or swArea == swReleaseTop:
457 +            common.logger.debug(3,"swArea = "+swArea+" swReleaseTop ="+swReleaseTop)
458              return
459  
460          import tarfile
461          try: # create tar ball
462 <            tar = tarfile.open(self.tgzNameWithPath, "w:gz")
462 >            #tar = tarfile.open(self.tgzNameWithPath, "w:gz")
463 >            tar = tarfile.open(self.tarNameWithPath, "w")
464              ## First find the executable
465              if (self.executable != ''):
466                  exeWithPath = self.scram.findFile_(executable)
# Line 733 | Line 484 | class Cmssw(JobType):
484                      pass
485  
486              ## Now get the libraries: only those in local working area
487 +            tar.dereference=True
488              libDir = 'lib'
489              lib = swArea+'/' +libDir
490              common.logger.debug(5,"lib "+lib+" to be tarred")
# Line 744 | Line 496 | class Cmssw(JobType):
496              module = swArea + '/' + moduleDir
497              if os.path.isdir(module):
498                  tar.add(module,moduleDir)
499 +            tar.dereference=False
500  
501              ## Now check if any data dir(s) is present
502 <            swAreaLen=len(swArea)
503 <            for root, dirs, files in os.walk(swArea):
504 <                if "data" in dirs:
505 <                    common.logger.debug(5,"data "+root+"/data"+" to be tarred")
506 <                    tar.add(root+"/data",root[swAreaLen:]+"/data")
507 <
508 <            ### Removed ProdAgent Api dependencies ###
509 <            ### Add ProdAgent dir to tar
510 <            #paDir = 'ProdAgentApi'
511 <            #pa = os.environ['CRABDIR'] + '/' + 'ProdAgentApi'
512 <            #if os.path.isdir(pa):
513 <            #    tar.add(pa,paDir)
502 >            self.dataExist = False
503 >            todo_list = [(i, i) for i in  os.listdir(swArea+"/src")]
504 >            while len(todo_list):
505 >                entry, name = todo_list.pop()
506 >                if name.startswith('crab_0_') or  name.startswith('.') or name == 'CVS':
507 >                    continue
508 >                if os.path.isdir(swArea+"/src/"+entry):
509 >                    entryPath = entry + '/'
510 >                    todo_list += [(entryPath + i, i) for i in  os.listdir(swArea+"/src/"+entry)]
511 >                    if name == 'data':
512 >                        self.dataExist=True
513 >                        common.logger.debug(5,"data "+entry+" to be tarred")
514 >                        tar.add(swArea+"/src/"+entry,"src/"+entry)
515 >                    pass
516 >                pass
517  
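The data-directory scan above replaces the old os.walk over the whole SCRAM area with an explicit to-do list restricted to src/, so CRAB working directories, hidden entries and CVS bookkeeping can be skipped before descending; a standalone sketch of the same traversal that simply collects the matches (find_data_dirs is an illustrative name):

    import os

    def find_data_dirs(src_area):
        data_dirs = []
        todo = [(name, name) for name in os.listdir(src_area)]
        while todo:
            rel_path, name = todo.pop()
            if name.startswith('crab_0_') or name.startswith('.') or name == 'CVS':
                continue
            full = os.path.join(src_area, rel_path)
            if os.path.isdir(full):
                todo += [(os.path.join(rel_path, entry), entry) for entry in os.listdir(full)]
                if name == 'data':
                    data_dirs.append(rel_path)
        return data_dirs

    # e.g. find_data_dirs('/path/to/CMSSW_2_2_13/src') -> ['MyAnalysis/MyModule/data']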
518 <            ## Add ProdCommon dir to tar
519 <            prodcommonDir = 'ProdCommon'
520 <            prodcommonPath = os.environ['CRABDIR'] + '/' + 'ProdCommon'
521 <            if os.path.isdir(prodcommonPath):
766 <                tar.add(prodcommonPath,prodcommonDir)
518 >            ### CMSSW ParameterSet
519 >            if not self.pset is None:
520 >                cfg_file = common.work_space.jobDir()+self.configFilename()
521 >                tar.add(cfg_file,self.configFilename())
522  
768            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
769            tar.close()
770        except :
771            raise CrabException('Could not create tar-ball')
523  
524 <        ## check for tarball size
525 <        tarballinfo = os.stat(self.tgzNameWithPath)
526 <        if ( tarballinfo.st_size > self.MaxTarBallSize*1024*1024 ) :
527 <            raise CrabException('Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) + ' MB input sandbox limit and not supported by the used GRID submission system. Please make sure that no unnecessary files are in all data directories in your local CMSSW project area as they are automatically packed into the input sandbox.')
524 >            ## Add ProdCommon dir to tar
525 >            prodcommonDir = './'
526 >            prodcommonPath = os.environ['CRABDIR'] + '/' + 'external/'
527 >            neededStuff = ['ProdCommon/__init__.py','ProdCommon/FwkJobRep', 'ProdCommon/CMSConfigTools', \
528 >                           'ProdCommon/Core', 'ProdCommon/MCPayloads', 'IMProv', 'ProdCommon/Storage', \
529 >                           'WMCore/__init__.py','WMCore/Algorithms']
530 >            for file in neededStuff:
531 >                tar.add(prodcommonPath+file,prodcommonDir+file)
532  
533 <        ## create tar-ball with ML stuff
534 <        self.MLtgzfile =  common.work_space.pathForTgz()+'share/MLfiles.tgz'
780 <        try:
781 <            tar = tarfile.open(self.MLtgzfile, "w:gz")
533 >            ##### ML stuff
534 >            ML_file_list=['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py']
535              path=os.environ['CRABDIR'] + '/python/'
536 <            for file in ['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py', 'parseCrabFjr.py']:
536 >            for file in ML_file_list:
537                  tar.add(path+file,file)
538 <            common.logger.debug(5,"Files added to "+self.MLtgzfile+" : "+str(tar.getnames()))
538 >
539 >            ##### Utils
540 >            Utils_file_list=['parseCrabFjr.py','writeCfg.py', 'fillCrabFjr.py','cmscp.py']
541 >            for file in Utils_file_list:
542 >                tar.add(path+file,file)
543 >
544 >            ##### AdditionalFiles
545 >            tar.dereference=True
546 >            for file in self.additional_inbox_files:
547 >                tar.add(file,string.split(file,'/')[-1])
548 >            tar.dereference=False
549 >            common.logger.debug(5,"Files in "+self.tarNameWithPath+" : "+str(tar.getnames()))
550 >
551              tar.close()
552 <        except :
553 <            raise CrabException('Could not create ML files tar-ball')
552 >        except IOError, exc:
553 >            common.logger.write(str(exc))
554 >            raise CrabException('Could not create tar-ball '+self.tarNameWithPath)
555 >        except tarfile.TarError, exc:
556 >            common.logger.write(str(exc))
557 >            raise CrabException('Could not create tar-ball '+self.tarNameWithPath)
558 >  
559 >    def zipTarFile(self):  
560  
561 <        return
561 >        cmd = "gzip -c %s > %s "%(self.tarNameWithPath,self.tgzNameWithPath)
562 >        res=runCommand(cmd)
563  
564 <    def additionalInputFileTgz(self):
565 <        """
566 <        Put all additional files into a tar ball and return its name
567 <        """
568 <        import tarfile
569 <        tarName=  common.work_space.pathForTgz()+'share/'+self.additional_tgz_name
570 <        tar = tarfile.open(tarName, "w:gz")
571 <        for file in self.additional_inbox_files:
572 <            tar.add(file,string.split(file,'/')[-1])
573 <        common.logger.debug(5,"Files added to "+self.additional_tgz_name+" : "+str(tar.getnames()))
802 <        tar.close()
803 <        return tarName
564 >        tarballinfo = os.stat(self.tgzNameWithPath)
565 >        if ( tarballinfo.st_size > self.MaxTarBallSize*1024*1024 ) :
566 >            msg  = 'Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) \
567 >               +'MB input sandbox limit \n'
568 >            msg += '      and not supported by the direct GRID submission system.\n'
569 >            msg += '      Please use the CRAB server mode by setting server_name=<NAME> in section [CRAB] of your crab.cfg.\n'
570 >            msg += '      For further infos please see https://twiki.cern.ch/twiki/bin/view/CMS/CrabServer#CRABSERVER_for_Users'
571 >            raise CrabException(msg)
572 >
573 >        ## create tar-ball with ML stuff
574  
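zipTarFile shells out to gzip -c and then applies the MaxTarBallSize limit to the compressed sandbox; a sketch of the same two steps done with Python's gzip module instead of the external command (zip_and_check is an illustrative name):

    import gzip, os, shutil

    def zip_and_check(tar_path, tgz_path, max_mb):
        src = open(tar_path, 'rb')
        dst = gzip.open(tgz_path, 'wb')
        shutil.copyfileobj(src, dst)
        dst.close()
        src.close()
        size_mb = os.stat(tgz_path).st_size / 1024.0 / 1024.0
        if size_mb > max_mb:
            raise RuntimeError('Input sandbox size of %.1f MB exceeds the %.1f MB limit'
                               % (size_mb, float(max_mb)))
        return size_mb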
575 <    def wsSetupEnvironment(self, nj):
575 >    def wsSetupEnvironment(self, nj=0):
576          """
577          Returns part of a job script which prepares
578          the execution environment for the job 'nj'.
579          """
580 +        # FUTURE: Drop support for .cfg when possible
581 +        if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
582 +            psetName = 'pset.py'
583 +        else:
584 +            psetName = 'pset.cfg'
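The psetName choice above selects the python configuration for CMSSW 2_1_x and later and the legacy .cfg format before that; since CMSSW_major and CMSSW_minor are stored as strings by the parsing at the top of __init__, the numeric comparison sketched here is what the condition intends:

    def pset_filename(major, minor):
        # equivalent to (major >= 2 and minor >= 1) or (major >= 3) for non-negative ints
        if (major, minor) >= (2, 1):
            return 'pset.py'
        return 'pset.cfg'

    assert pset_filename(2, 0) == 'pset.cfg'
    assert pset_filename(3, 1) == 'pset.py'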
585          # Prepare JobType-independent part
586 <        txt = ''
586 >        txt = '\n#Written by cms_cmssw::wsSetupEnvironment\n'
587          txt += 'echo ">>> setup environment"\n'
588 <        txt += 'if [ $middleware == LCG ]; then \n'
588 >        txt += 'if [ $middleware == LCG ] || [ $middleware == CAF ] || [ $middleware == LSF ]; then \n'
589          txt += self.wsSetupCMSLCGEnvironment_()
590          txt += 'elif [ $middleware == OSG ]; then\n'
591          txt += '    WORKING_DIR=`/bin/mktemp  -d $OSG_WN_TMP/cms_XXXXXXXXXXXX`\n'
592          txt += '    if [ ! $? == 0 ] ;then\n'
593 <        txt += '        echo "SET_CMS_ENV 10016 ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
594 <        txt += '        echo "JOB_EXIT_STATUS = 10016"\n'
595 <        txt += '        echo "JobExitCode=10016" | tee -a $RUNTIME_AREA/$repo\n'
821 <        txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
822 <        txt += '        exit 1\n'
593 >        txt += '        echo "ERROR ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
594 >        txt += '        job_exit_code=10016\n'
595 >        txt += '        func_exit\n'
596          txt += '    fi\n'
597          txt += '    echo ">>> Created working directory: $WORKING_DIR"\n'
598          txt += '\n'
# Line 827 | Line 600 | class Cmssw(JobType):
600          txt += '    cd $WORKING_DIR\n'
601          txt += '    echo ">>> current directory (WORKING_DIR): $WORKING_DIR"\n'
602          txt += self.wsSetupCMSOSGEnvironment_()
603 <        #txt += '    echo "### Set SCRAM ARCH to ' + self.executable_arch + ' ###"\n'
604 <        #txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
603 >        #Setup SGE Environment
604 >        txt += 'elif [ $middleware == SGE ]; then\n'
605 >        txt += self.wsSetupCMSLCGEnvironment_()
606 >
607 >        txt += 'elif [ $middleware == ARC ]; then\n'
608 >        txt += self.wsSetupCMSLCGEnvironment_()
609 >
610          txt += 'fi\n'
611  
612          # Prepare JobType-specific part
# Line 839 | Line 617 | class Cmssw(JobType):
617          txt += scram+' project CMSSW '+self.version+'\n'
618          txt += 'status=$?\n'
619          txt += 'if [ $status != 0 ] ; then\n'
620 <        txt += '    echo "SET_EXE_ENV 10034 ==>ERROR CMSSW '+self.version+' not found on `hostname`" \n'
621 <        txt += '    echo "JOB_EXIT_STATUS = 10034"\n'
622 <        txt += '    echo "JobExitCode=10034" | tee -a $RUNTIME_AREA/$repo\n'
845 <        txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
846 <        txt += '    if [ $middleware == OSG ]; then \n'
847 <        txt += '        cd $RUNTIME_AREA\n'
848 <        txt += '        echo ">>> current directory (RUNTIME_AREA): $RUNTIME_AREA"\n'
849 <        txt += '        echo ">>> Remove working directory: $WORKING_DIR"\n'
850 <        txt += '        /bin/rm -rf $WORKING_DIR\n'
851 <        txt += '        if [ -d $WORKING_DIR ] ;then\n'
852 <        txt += '            echo "SET_CMS_ENV 10018 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after CMSSW CMSSW_0_6_1 not found on `hostname`"\n'
853 <        txt += '            echo "JOB_EXIT_STATUS = 10018"\n'
854 <        txt += '            echo "JobExitCode=10018" | tee -a $RUNTIME_AREA/$repo\n'
855 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
856 <        txt += '        fi\n'
857 <        txt += '    fi \n'
858 <        txt += '    exit 1 \n'
620 >        txt += '    echo "ERROR ==> CMSSW '+self.version+' not found on `hostname`" \n'
621 >        txt += '    job_exit_code=10034\n'
622 >        txt += '    func_exit\n'
623          txt += 'fi \n'
624          txt += 'cd '+self.version+'\n'
625 <        ########## FEDE FOR DBS2 ######################
862 <        txt += 'SOFTWARE_DIR=`pwd`\n'
625 >        txt += 'SOFTWARE_DIR=`pwd`; export SOFTWARE_DIR\n'
626          txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
864        ###############################################
865        ### needed grep for bug in scramv1 ###
627          txt += 'eval `'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME`\n'
628 +        txt += 'if [ $? != 0 ] ; then\n'
629 +        txt += '    echo "ERROR ==> Problem with the command: "\n'
630 +        txt += '    echo "eval \`'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME \` at `hostname`"\n'
631 +        txt += '    job_exit_code=10034\n'
632 +        txt += '    func_exit\n'
633 +        txt += 'fi \n'
634          # Handle the arguments:
635          txt += "\n"
636          txt += "## number of arguments (first argument always jobnumber)\n"
637          txt += "\n"
638 <        txt += "if [ $nargs -lt "+str(len(self.argsList[nj].split()))+" ]\n"
638 >        txt += "if [ $nargs -lt "+str(self.argsList)+" ]\n"
639          txt += "then\n"
640 <        txt += "    echo 'SET_EXE_ENV 1 ==> ERROR Too few arguments' +$nargs+ \n"
641 <        txt += '    echo "JOB_EXIT_STATUS = 50113"\n'
642 <        txt += '    echo "JobExitCode=50113" | tee -a $RUNTIME_AREA/$repo\n'
876 <        txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
877 <        txt += '    if [ $middleware == OSG ]; then \n'
878 <        txt += '        cd $RUNTIME_AREA\n'
879 <        txt += '        echo ">>> current directory (RUNTIME_AREA): $RUNTIME_AREA"\n'
880 <        txt += '        echo ">>> Remove working directory: $WORKING_DIR"\n'
881 <        txt += '        /bin/rm -rf $WORKING_DIR\n'
882 <        txt += '        if [ -d $WORKING_DIR ] ;then\n'
883 <        txt += '            echo "SET_EXE_ENV 50114 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Too few arguments for CRAB job wrapper"\n'
884 <        txt += '            echo "JOB_EXIT_STATUS = 50114"\n'
885 <        txt += '            echo "JobExitCode=50114" | tee -a $RUNTIME_AREA/$repo\n'
886 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
887 <        txt += '        fi\n'
888 <        txt += '    fi \n'
889 <        txt += "    exit 1\n"
640 >        txt += "    echo 'ERROR ==> Too few arguments' +$nargs+ \n"
641 >        txt += '    job_exit_code=50113\n'
642 >        txt += "    func_exit\n"
643          txt += "fi\n"
644          txt += "\n"
645  
646          # Prepare job-specific part
647          job = common.job_list[nj]
895        ### FEDE FOR DBS OUTPUT PUBLICATION
648          if (self.datasetPath):
649 +            self.primaryDataset = self.datasetPath.split("/")[1]
650 +            DataTier = self.datasetPath.split("/")[2]
651              txt += '\n'
652              txt += 'DatasetPath='+self.datasetPath+'\n'
653  
654 <            datasetpath_split = self.datasetPath.split("/")
655 <
902 <            txt += 'PrimaryDataset='+datasetpath_split[1]+'\n'
903 <            txt += 'DataTier='+datasetpath_split[2]+'\n'
654 >            txt += 'PrimaryDataset='+self.primaryDataset +'\n'
655 >            txt += 'DataTier='+DataTier+'\n'
656              txt += 'ApplicationFamily=cmsRun\n'
657  
658          else:
659 +            self.primaryDataset = 'null'
660              txt += 'DatasetPath=MCDataTier\n'
661              txt += 'PrimaryDataset=null\n'
662              txt += 'DataTier=null\n'
663              txt += 'ApplicationFamily=MCDataTier\n'
664 <        if self.pset != None: #CarlosDaniele
664 >        if self.pset != None:
665              pset = os.path.basename(job.configFilename())
666              txt += '\n'
667              txt += 'cp  $RUNTIME_AREA/'+pset+' .\n'
915            if (self.datasetPath): # standard job
916                txt += 'InputFiles=${args[1]}\n'
917                txt += 'MaxEvents=${args[2]}\n'
918                txt += 'SkipEvents=${args[3]}\n'
919                txt += 'echo "Inputfiles:<$InputFiles>"\n'
920                txt += 'sed "s#\'INPUTFILE\'#$InputFiles#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
921                txt += 'echo "MaxEvents:<$MaxEvents>"\n'
922                txt += 'sed "s#int32 input = 0#int32 input = $MaxEvents#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
923                txt += 'echo "SkipEvents:<$SkipEvents>"\n'
924                txt += 'sed "s#uint32 skipEvents = 0#uint32 skipEvents = $SkipEvents#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
925            else:  # pythia like job
926                seedIndex=1
927                if (self.firstRun):
928                    txt += 'FirstRun=${args['+str(seedIndex)+']}\n'
929                    txt += 'echo "FirstRun: <$FirstRun>"\n'
930                    txt += 'sed "s#uint32 firstRun = 0#uint32 firstRun = $FirstRun#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
931                    seedIndex=seedIndex+1
932
933                if (self.sourceSeed):
934                    txt += 'Seed=${args['+str(seedIndex)+']}\n'
935                    txt += 'sed "s#uint32 sourceSeed = 0#uint32 sourceSeed = $Seed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
936                    seedIndex=seedIndex+1
937                    ## the following seeds are not always present
938                    if (self.sourceSeedVtx):
939                        txt += 'VtxSeed=${args['+str(seedIndex)+']}\n'
940                        txt += 'echo "VtxSeed: <$VtxSeed>"\n'
941                        txt += 'sed "s#uint32 VtxSmeared = 0#uint32 VtxSmeared = $VtxSeed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
942                        seedIndex += 1
943                    if (self.sourceSeedG4):
944                        txt += 'G4Seed=${args['+str(seedIndex)+']}\n'
945                        txt += 'echo "G4Seed: <$G4Seed>"\n'
946                        txt += 'sed "s#uint32 g4SimHits = 0#uint32 g4SimHits = $G4Seed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
947                        seedIndex += 1
948                    if (self.sourceSeedMix):
949                        txt += 'mixSeed=${args['+str(seedIndex)+']}\n'
950                        txt += 'echo "MixSeed: <$mixSeed>"\n'
951                        txt += 'sed "s#uint32 mix = 0#uint32 mix = $mixSeed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
952                        seedIndex += 1
953                    pass
954                pass
955            txt += 'mv -f '+pset+' pset.cfg\n'
668  
669 <        if len(self.additional_inbox_files) > 0:
670 <            txt += 'if [ -e $RUNTIME_AREA/'+self.additional_tgz_name+' ] ; then\n'
671 <            txt += '  tar xzvf $RUNTIME_AREA/'+self.additional_tgz_name+'\n'
672 <            txt += 'fi\n'
961 <            pass
669 >            txt += 'PreserveSeeds='  + ','.join(self.preserveSeeds)  + '; export PreserveSeeds\n'
670 >            txt += 'IncrementSeeds=' + ','.join(self.incrementSeeds) + '; export IncrementSeeds\n'
671 >            txt += 'echo "PreserveSeeds: <$PreserveSeeds>"\n'
672 >            txt += 'echo "IncrementSeeds:<$IncrementSeeds>"\n'
673  
674 <        if self.pset != None: #CarlosDaniele
675 <            txt += '\n'
676 <            txt += 'echo "***** cat pset.cfg *********"\n'
677 <            txt += 'cat pset.cfg\n'
678 <            txt += 'echo "****** end pset.cfg ********"\n'
674 >            txt += 'mv -f ' + pset + ' ' + psetName + '\n'
675 >
676 >
677 >        if self.pset != None:
678 >            # FUTURE: Can simplify for 2_1_x and higher
679              txt += '\n'
680 <            ### FEDE FOR DBS OUTPUT PUBLICATION
681 <            txt += 'PSETHASH=`EdmConfigHash < pset.cfg` \n'
680 >            if self.debug_wrapper == 1:
681 >                txt += 'echo "***** cat ' + psetName + ' *********"\n'
682 >                txt += 'cat ' + psetName + '\n'
683 >                txt += 'echo "****** end ' + psetName + ' ********"\n'
684 >                txt += '\n'
685 >                txt += 'echo "***********************" \n'
686 >                txt += 'which edmConfigHash \n'
687 >                txt += 'echo "***********************" \n'
688 >            if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
689 >                txt += 'edmConfigHash ' + psetName + ' \n'
690 >                txt += 'PSETHASH=`edmConfigHash ' + psetName + '` \n'
691 >            else:
692 >                txt += 'PSETHASH=`edmConfigHash < ' + psetName + '` \n'
693              txt += 'echo "PSETHASH = $PSETHASH" \n'
694 <            ##############
694 >            #### FEDE temporary fix for noEdm files #####
695 >            txt += 'if [ -z "$PSETHASH" ]; then \n'
696 >            txt += '   export PSETHASH=null\n'
697 >            txt += 'fi \n'
698 >            #############################################
699              txt += '\n'
700          return txt
701  
702 <    def wsBuildExe(self, nj=0):
702 >    def wsUntarSoftware(self, nj=0):
703          """
704          Put in the script the commands to build an executable
705          or a library.
706          """
707  
708 <        txt = ""
708 >        txt = '\n#Written by cms_cmssw::wsUntarSoftware\n'
709  
710          if os.path.isfile(self.tgzNameWithPath):
711              txt += 'echo ">>> tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+' :" \n'
712 <            txt += 'tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
712 >            txt += 'tar zxvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
713 >            if  self.debug_wrapper==1 :
714 >                txt += 'ls -Al \n'
715              txt += 'untar_status=$? \n'
716              txt += 'if [ $untar_status -ne 0 ]; then \n'
717 <            txt += '   echo "SET_EXE 1 ==> ERROR Untarring .tgz file failed"\n'
718 <            txt += '   echo "JOB_EXIT_STATUS = $untar_status" \n'
719 <            txt += '   echo "JobExitCode=$untar_status" | tee -a $RUNTIME_AREA/$repo\n'
992 <            txt += '   if [ $middleware == OSG ]; then \n'
993 <            txt += '       cd $RUNTIME_AREA\n'
994 <            txt += '        echo ">>> current directory (RUNTIME_AREA): $RUNTIME_AREA"\n'
995 <            txt += '        echo ">>> Remove working directory: $WORKING_DIR"\n'
996 <            txt += '       /bin/rm -rf $WORKING_DIR\n'
997 <            txt += '       if [ -d $WORKING_DIR ] ;then\n'
998 <            txt += '           echo "SET_EXE 50999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Untarring .tgz file failed"\n'
999 <            txt += '           echo "JOB_EXIT_STATUS = 50999"\n'
1000 <            txt += '           echo "JobExitCode=50999" | tee -a $RUNTIME_AREA/$repo\n'
1001 <            txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
1002 <            txt += '       fi\n'
1003 <            txt += '   fi \n'
1004 <            txt += '   \n'
1005 <            txt += '   exit 1 \n'
717 >            txt += '   echo "ERROR ==> Untarring .tgz file failed"\n'
718 >            txt += '   job_exit_code=$untar_status\n'
719 >            txt += '   func_exit\n'
720              txt += 'else \n'
721              txt += '   echo "Successful untar" \n'
722              txt += 'fi \n'
723              txt += '\n'
724 <            #### Removed ProdAgent API dependencies
1011 <            txt += 'echo ">>> Include ProdCommon in PYTHONPATH:"\n'
724 >            txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
725              txt += 'if [ -z "$PYTHONPATH" ]; then\n'
726 <            #### FEDE FOR DBS OUTPUT PUBLICATION
1014 <            txt += '   export PYTHONPATH=$SOFTWARE_DIR/ProdCommon\n'
726 >            txt += '   export PYTHONPATH=$RUNTIME_AREA/\n'
727              txt += 'else\n'
728 <            txt += '   export PYTHONPATH=$SOFTWARE_DIR/ProdCommon:${PYTHONPATH}\n'
728 >            txt += '   export PYTHONPATH=$RUNTIME_AREA/:${PYTHONPATH}\n'
729              txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
1018            ###################
730              txt += 'fi\n'
731              txt += '\n'
732  
# Line 1023 | Line 734 | class Cmssw(JobType):
734  
735          return txt
736  
737 <    def modifySteeringCards(self, nj):
737 >    def wsBuildExe(self, nj=0):
738          """
739 <        modify the card provided by the user,
740 <        writing a new card into share dir
739 >        Put in the script the commands to build an executable
740 >        or a library.
741          """
742  
743 +        txt = '\n#Written by cms_cmssw::wsBuildExe\n'
744 +        txt += 'echo ">>> moving CMSSW software directories in `pwd`" \n'
745 +
746 +        txt += 'rm -r lib/ module/ \n'
747 +        txt += 'mv $RUNTIME_AREA/lib/ . \n'
748 +        txt += 'mv $RUNTIME_AREA/module/ . \n'
749 +        if self.dataExist == True:
750 +            txt += 'rm -r src/ \n'
751 +            txt += 'mv $RUNTIME_AREA/src/ . \n'
752 +        if len(self.additional_inbox_files)>0:
753 +            for file in self.additional_inbox_files:
754 +                txt += 'mv $RUNTIME_AREA/'+os.path.basename(file)+' . \n'
755 +        # txt += 'mv $RUNTIME_AREA/ProdCommon/ . \n'
756 +        # txt += 'mv $RUNTIME_AREA/IMProv/ . \n'
757 +
758 +        txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
759 +        txt += 'if [ -z "$PYTHONPATH" ]; then\n'
760 +        txt += '   export PYTHONPATH=$RUNTIME_AREA/\n'
761 +        txt += 'else\n'
762 +        txt += '   export PYTHONPATH=$RUNTIME_AREA/:${PYTHONPATH}\n'
763 +        txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
764 +        txt += 'fi\n'
765 +        txt += '\n'
766 +
767 +        return txt
768 +
769 +
770      def executableName(self):
771 <        if self.scriptExe: #CarlosDaniele
771 >        if self.scriptExe:
772              return "sh "
773          else:
774              return self.executable
775  
776      def executableArgs(self):
777 <        if self.scriptExe:#CarlosDaniele
778 <            return   self.scriptExe + " $NJob"
777 >        # FUTURE: This function tests the CMSSW version. Can be simplified as we drop support for old versions
778 >        if self.scriptExe:
779 >            return self.scriptExe + " $NJob"
780          else:
781 <            # if >= CMSSW_1_5_X, add -j crab_fjr.xml
782 <            version_array = self.scram.getSWVersion().split('_')
783 <            major = 0
784 <            minor = 0
785 <            try:
1047 <                major = int(version_array[1])
1048 <                minor = int(version_array[2])
1049 <            except:
1050 <                msg = "Cannot parse CMSSW version string: " + "_".join(version_array) + " for major and minor release number!"
1051 <                raise CrabException(msg)
1052 <            if major >= 1 and minor >= 5 :
1053 <                return " -j " + self.fjrFileName + " -p pset.cfg"
781 >            ex_args = ""
782 >            ex_args += " -j $RUNTIME_AREA/crab_fjr_$NJob.xml"
783 >            # Type of config file depends on CMSSW version
784 >            if self.CMSSW_major >= 2 :
785 >                ex_args += " -p pset.py"
786              else:
787 <                return " -p pset.cfg"
787 >                ex_args += " -p pset.cfg"
788 >            return ex_args
789  
790      def inputSandbox(self, nj):
791          """
792          Returns a list of filenames to be put in JDL input sandbox.
793          """
794          inp_box = []
1062        # # dict added to delete duplicate from input sandbox file list
1063        # seen = {}
1064        ## code
795          if os.path.isfile(self.tgzNameWithPath):
796              inp_box.append(self.tgzNameWithPath)
797 <        if os.path.isfile(self.MLtgzfile):
1068 <            inp_box.append(self.MLtgzfile)
1069 <        ## config
1070 <        if not self.pset is None:
1071 <            inp_box.append(common.work_space.pathForTgz() + 'job/' + self.configFilename())
1072 <        ## additional input files
1073 <        tgz = self.additionalInputFileTgz()
1074 <        inp_box.append(tgz)
797 >        inp_box.append(common.work_space.jobDir() + self.scriptName)
798          return inp_box
799  
800      def outputSandbox(self, nj):
# Line 1083 | Line 806 | class Cmssw(JobType):
806          ## User Declared output files
807          for out in (self.output_file+self.output_file_sandbox):
808              n_out = nj + 1
809 <            out_box.append(self.numberFile_(out,str(n_out)))
809 >            out_box.append(numberFile(out,str(n_out)))
810          return out_box
811  
1089    def prepareSteeringCards(self):
1090        """
1091        Make initial modifications of the user's steering card file.
1092        """
1093        return
812  
813      def wsRenameOutput(self, nj):
814          """
815          Returns part of a job script which renames the produced files.
816          """
817  
818 <        txt = '\n'
818 >        txt = '\n#Written by cms_cmssw::wsRenameOutput\n'
819          txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
820          txt += 'echo ">>> current directory content:"\n'
821 <        txt += 'ls \n'
821 >        if self.debug_wrapper==1:
822 >            txt += 'ls -Al\n'
823          txt += '\n'
824  
1106        txt += 'output_exit_status=0\n'
1107
1108        for fileWithSuffix in (self.output_file_sandbox):
1109            output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
1110            txt += '\n'
1111            txt += '# check output file\n'
1112            txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n'
1113            txt += '    mv '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1114            txt += '    ln -s $RUNTIME_AREA/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
1115            txt += 'else\n'
1116            txt += '    exit_status=60302\n'
1117            txt += '    echo "ERROR: Output file '+fileWithSuffix+' not found"\n'
1118            if common.scheduler.name() == 'CONDOR_G':
1119                txt += '    if [ $middleware == OSG ]; then \n'
1120                txt += '        echo "prepare dummy output file"\n'
1121                txt += '        echo "Processing of job output failed" > $RUNTIME_AREA/'+output_file_num+'\n'
1122                txt += '    fi \n'
1123            txt += 'fi\n'
1124
825          for fileWithSuffix in (self.output_file):
826 <            output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
826 >            output_file_num = numberFile(fileWithSuffix, '$NJob')
827              txt += '\n'
828              txt += '# check output file\n'
829              txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n'
# Line 1134 | Line 834 | class Cmssw(JobType):
834                  txt += '    mv '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
835                  txt += '    ln -s $RUNTIME_AREA/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
836              txt += 'else\n'
837 <            txt += '    exit_status=60302\n'
838 <            txt += '    echo "ERROR: Output file '+fileWithSuffix+' not found"\n'
839 <            txt += '    echo "JOB_EXIT_STATUS = $exit_status"\n'
1140 <            txt += '    output_exit_status=$exit_status\n'
1141 <            if common.scheduler.name() == 'CONDOR_G':
837 >            txt += '    job_exit_code=60302\n'
838 >            txt += '    echo "WARNING: Output file '+fileWithSuffix+' not found"\n'
839 >            if common.scheduler.name().upper() == 'CONDOR_G':
840                  txt += '    if [ $middleware == OSG ]; then \n'
841                  txt += '        echo "prepare dummy output file"\n'
842                  txt += '        echo "Processing of job output failed" > $RUNTIME_AREA/'+output_file_num+'\n'
# Line 1146 | Line 844 | class Cmssw(JobType):
844              txt += 'fi\n'
845          file_list = []
846          for fileWithSuffix in (self.output_file):
847 <             file_list.append(self.numberFile_(fileWithSuffix, '$NJob'))
847 >             file_list.append(numberFile('$SOFTWARE_DIR/'+fileWithSuffix, '$NJob'))
848  
849 <        txt += 'file_list="'+string.join(file_list,' ')+'"\n'
849 >        txt += 'file_list="'+string.join(file_list,',')+'"\n'
850          txt += '\n'
851          txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
852          txt += 'echo ">>> current directory content:"\n'
853 <        txt += 'ls \n'
853 >        if self.debug_wrapper==1:
854 >            txt += 'ls -Al\n'
855          txt += '\n'
856          txt += 'cd $RUNTIME_AREA\n'
857          txt += 'echo ">>> current directory (RUNTIME_AREA):  $RUNTIME_AREA"\n'
858          return txt
859  
1161    def numberFile_(self, file, txt):
1162        """
1163        append _'txt' before last extension of a file
1164        """
1165        p = string.split(file,".")
1166        # take away last extension
1167        name = p[0]
1168        for x in p[1:-1]:
1169            name=name+"."+x
1170        # add "_txt"
1171        if len(p)>1:
1172            ext = p[len(p)-1]
1173            result = name + '_' + txt + "." + ext
1174        else:
1175            result = name + '_' + txt
1176
1177        return result
1178
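The numberFile_ helper above has been dropped in favour of numberFile from crab_util; a minimal sketch of the expected behaviour, assuming it is unchanged from the removed method (insert _<txt> before the last extension):

    def number_file_sketch(filename, txt):
        # 'out.root', '3' -> 'out_3.root';  'out', '3' -> 'out_3'
        parts = filename.split('.')
        if len(parts) > 1:
            return '.'.join(parts[:-1]) + '_' + txt + '.' + parts[-1]
        return filename + '_' + txt
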
860      def getRequirements(self, nj=[]):
861          """
862          return job requirements to add to jdl files
# Line 1185 | Line 866 | class Cmssw(JobType):
866              req='Member("VO-cms-' + \
867                   self.version + \
868                   '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
869 <        ## SL add requirement for OS version only if SL4
1189 <        #reSL4 = re.compile( r'slc4' )
1190 <        if self.executable_arch: # and reSL4.search(self.executable_arch):
869 >        if self.executable_arch:
870              req+=' && Member("VO-cms-' + \
871                   self.executable_arch + \
872                   '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
873  
874          req = req + ' && (other.GlueHostNetworkAdapterOutboundIP)'
875 +        if ( common.scheduler.name() == "glitecoll" ) or ( common.scheduler.name() == "glite"):
876 +            req += ' && other.GlueCEStateStatus == "Production" '
877  
878          return req
879  
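For a hypothetical release and architecture, the requirements string assembled by getRequirements reads roughly as below (sketch; the real values come from self.version and self.executable_arch, and glite/glitecoll schedulers additionally append the GlueCEStateStatus clause):

    version, arch = 'CMSSW_2_2_3', 'slc4_ia32_gcc345'   # hypothetical values
    req  = 'Member("VO-cms-' + version + '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
    req += ' && Member("VO-cms-' + arch + '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
    req += ' && (other.GlueHostNetworkAdapterOutboundIP)'
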
880      def configFilename(self):
881          """ return the config filename """
882 <        return self.name()+'.cfg'
882 >        # FUTURE: Can remove cfg mode for CMSSW >= 2_1_x
883 >        if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
884 >          return self.name()+'.py'
885 >        else:
886 >          return self.name()+'.cfg'
887  
888      def wsSetupCMSOSGEnvironment_(self):
889          """
890          Returns part of a job script which is prepares
891          the execution environment and which is common for all CMS jobs.
892          """
893 <        txt = '    echo ">>> setup CMS OSG environment:"\n'
893 >        txt = '\n#Written by cms_cmssw::wsSetupCMSOSGEnvironment_\n'
894 >        txt += '    echo ">>> setup CMS OSG environment:"\n'
895          txt += '    echo "set SCRAM ARCH to ' + self.executable_arch + '"\n'
896          txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
897          txt += '    echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
# Line 1213 | Line 899 | class Cmssw(JobType):
899          txt += '      # Use $OSG_APP/cmssoft/cms/cmsset_default.sh to setup cms software\n'
900          txt += '        source $OSG_APP/cmssoft/cms/cmsset_default.sh '+self.version+'\n'
901          txt += '    else\n'
902 <        txt += '        echo "SET_CMS_ENV 10020 ==> ERROR $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
903 <        txt += '        echo "JOB_EXIT_STATUS = 10020"\n'
904 <        txt += '        echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1219 <        txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
1220 <        txt += '\n'
1221 <        txt += '        cd $RUNTIME_AREA\n'
1222 <        txt += '        echo ">>> current directory (RUNTIME_AREA): $RUNTIME_AREA"\n'
1223 <        txt += '        echo ">>> Remove working directory: $WORKING_DIR"\n'
1224 <        txt += '        /bin/rm -rf $WORKING_DIR\n'
1225 <        txt += '        if [ -d $WORKING_DIR ] ;then\n'
1226 <        txt += '            echo "SET_CMS_ENV 10017 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1227 <        txt += '            echo "JOB_EXIT_STATUS = 10017"\n'
1228 <        txt += '            echo "JobExitCode=10017" | tee -a $RUNTIME_AREA/$repo\n'
1229 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
1230 <        txt += '        fi\n'
1231 <        txt += '\n'
1232 <        txt += '        exit 1\n'
902 >        txt += '        echo "ERROR ==> $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
903 >        txt += '        job_exit_code=10020\n'
904 >        txt += '        func_exit\n'
905          txt += '    fi\n'
906          txt += '\n'
907 <        txt += '    echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
907 >        txt += '    echo "==> setup cms environment ok"\n'
908          txt += '    echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
909  
910          return txt
911  
1240    ### OLI_DANIELE
912      def wsSetupCMSLCGEnvironment_(self):
913          """
914          Returns part of a job script which is prepares
915          the execution environment and which is common for all CMS jobs.
916          """
917 <        txt = '    echo ">>> setup CMS LCG environment:"\n'
917 >        txt = '\n#Written by cms_cmssw::wsSetupCMSLCGEnvironment_\n'
918 >        txt += '    echo ">>> setup CMS LCG environment:"\n'
919          txt += '    echo "set SCRAM ARCH and BUILD_ARCH to ' + self.executable_arch + ' ###"\n'
920          txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
921          txt += '    export BUILD_ARCH='+self.executable_arch+'\n'
922          txt += '    if [ ! $VO_CMS_SW_DIR ] ;then\n'
923 <        txt += '        echo "SET_CMS_ENV 10031 ==> ERROR CMS software dir not found on WN `hostname`"\n'
924 <        txt += '        echo "JOB_EXIT_STATUS = 10031" \n'
925 <        txt += '        echo "JobExitCode=10031" | tee -a $RUNTIME_AREA/$repo\n'
1254 <        txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
1255 <        txt += '        exit 1\n'
923 >        txt += '        echo "ERROR ==> CMS software dir not found on WN `hostname`"\n'
924 >        txt += '        job_exit_code=10031\n'
925 >        txt += '        func_exit\n'
926          txt += '    else\n'
927          txt += '        echo "Sourcing environment... "\n'
928          txt += '        if [ ! -s $VO_CMS_SW_DIR/cmsset_default.sh ] ;then\n'
929 <        txt += '            echo "SET_CMS_ENV 10020 ==> ERROR cmsset_default.sh file not found into dir $VO_CMS_SW_DIR"\n'
930 <        txt += '            echo "JOB_EXIT_STATUS = 10020"\n'
931 <        txt += '            echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1262 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
1263 <        txt += '            exit 1\n'
929 >        txt += '            echo "ERROR ==> cmsset_default.sh file not found into dir $VO_CMS_SW_DIR"\n'
930 >        txt += '            job_exit_code=10020\n'
931 >        txt += '            func_exit\n'
932          txt += '        fi\n'
933          txt += '        echo "sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
934          txt += '        source $VO_CMS_SW_DIR/cmsset_default.sh\n'
935          txt += '        result=$?\n'
936          txt += '        if [ $result -ne 0 ]; then\n'
937 <        txt += '            echo "SET_CMS_ENV 10032 ==> ERROR problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
938 <        txt += '            echo "JOB_EXIT_STATUS = 10032"\n'
939 <        txt += '            echo "JobExitCode=10032" | tee -a $RUNTIME_AREA/$repo\n'
1272 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
1273 <        txt += '            exit 1\n'
937 >        txt += '            echo "ERROR ==> problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
938 >        txt += '            job_exit_code=10032\n'
939 >        txt += '            func_exit\n'
940          txt += '        fi\n'
941          txt += '    fi\n'
942          txt += '    \n'
943 <        txt += '    echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
943 >        txt += '    echo "==> setup cms environment ok"\n'
944          return txt
945  
946 <    ### FEDE FOR DBS OUTPUT PUBLICATION
1281 <    def modifyReport(self, nj):
946 >    def wsModifyReport(self, nj):
947          """
948          insert the part of the script that modifies the FrameworkJob Report
949          """
950  
951          txt = ''
952 <        try:
953 <            publish_data = int(self.cfg_params['USER.publish_data'])
954 <        except KeyError:
955 <            publish_data = 0
956 <        if (publish_data == 1):
1292 <            txt += 'echo ">>> Modify Job Report:" \n'
1293 <            ################ FEDE FOR DBS2 #############################################
1294 <            #txt += 'chmod a+x $SOFTWARE_DIR/ProdAgentApi/FwkJobRep/ModifyJobReport.py\n'
1295 <            txt += 'chmod a+x $SOFTWARE_DIR/ProdCommon/ProdCommon/FwkJobRep/ModifyJobReport.py\n'
1296 <            #############################################################################
952 >        publish_data = int(self.cfg_params.get('USER.publish_data',0))
953 >        #if (publish_data == 1):
954 >        if (self.copy_data == 1):
955 >            txt = '\n#Written by cms_cmssw::wsModifyReport\n'
956 >            publish_data = int(self.cfg_params.get('USER.publish_data',0))
957  
1298            txt += 'if [ -z "$SE" ]; then\n'
1299            txt += '    SE="" \n'
1300            txt += 'fi \n'
1301            txt += 'if [ -z "$SE_PATH" ]; then\n'
1302            txt += '    SE_PATH="" \n'
1303            txt += 'fi \n'
1304            txt += 'echo "SE = $SE"\n'
1305            txt += 'echo "SE_PATH = $SE_PATH"\n'
958  
959 <            processedDataset = self.cfg_params['USER.publish_data_name']
960 <            txt += 'ProcessedDataset='+processedDataset+'\n'
961 <            #### LFN=/store/user/<user>/processedDataset_PSETHASH
1310 <            txt += 'if [ "$SE_PATH" == "" ]; then\n'
1311 <            #### FEDE: added slash in LFN ##############
959 >            txt += 'if [ $StageOutExitStatus -eq 0 ]; then\n'
960 >            txt += '    FOR_LFN=$LFNBaseName\n'
961 >            txt += 'else\n'
962              txt += '    FOR_LFN=/copy_problems/ \n'
963 <            txt += 'else \n'
964 <            txt += '    tmp=`echo $SE_PATH | awk -F \'store\' \'{print$2}\'` \n'
965 <            #####  FEDE TO BE CHANGED, BECAUSE STORE IS HARDCODED!!!! ########
966 <            txt += '    FOR_LFN=/store$tmp \n'
967 <            txt += 'fi \n'
968 <            txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
963 >            txt += 'fi\n'
964 >
965 >            txt += 'echo ">>> Modify Job Report:" \n'
966 >            txt += 'chmod a+x $RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py\n'
967 >            txt += 'echo "SE = $SE"\n'
968 >            txt += 'echo "SE_PATH = $SE_PATH"\n'
969              txt += 'echo "FOR_LFN = $FOR_LFN" \n'
970              txt += 'echo "CMSSW_VERSION = $CMSSW_VERSION"\n\n'
1321            txt += 'echo "$SOFTWARE_DIR/ProdCommon/ProdCommon/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
1322            txt += '$SOFTWARE_DIR/ProdCommon/ProdCommon/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
971  
972 +
973 +            args = 'fjr $RUNTIME_AREA/crab_fjr_$NJob.xml n_job $NJob for_lfn $FOR_LFN PrimaryDataset $PrimaryDataset  ApplicationFamily $ApplicationFamily ApplicationName $executable cmssw_version $CMSSW_VERSION psethash $PSETHASH se_name $SE se_path $SE_PATH'
974 +            if (publish_data == 1):
975 +                processedDataset = self.cfg_params['USER.publish_data_name']
976 +                txt += 'ProcessedDataset='+processedDataset+'\n'
977 +                txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
978 +                args += ' UserProcessedDataset $USER-$ProcessedDataset-$PSETHASH'
979 +
980 +            txt += 'echo "$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py '+str(args)+'"\n'
981 +            txt += '$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py '+str(args)+'\n'
982              txt += 'modifyReport_result=$?\n'
1325            txt += 'echo modifyReport_result = $modifyReport_result\n'
983              txt += 'if [ $modifyReport_result -ne 0 ]; then\n'
984 <            txt += '    exit_status=1\n'
985 <            txt += '    echo "ERROR: Problem with ModifyJobReport"\n'
984 >            txt += '    modifyReport_result=70500\n'
985 >            txt += '    job_exit_code=$modifyReport_result\n'
986 >            txt += '    echo "ModifyReportResult=$modifyReport_result" | tee -a $RUNTIME_AREA/$repo\n'
987 >            txt += '    echo "WARNING: Problem with ModifyJobReport"\n'
988              txt += 'else\n'
989 <            txt += '    mv NewFrameworkJobReport.xml crab_fjr_$NJob.xml\n'
989 >            txt += '    mv NewFrameworkJobReport.xml $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
990              txt += 'fi\n'
1332        else:
1333            txt += 'echo "no data publication required"\n'
991          return txt
992  
993 <    def cleanEnv(self):
994 <        txt = ''
995 <        txt += 'if [ $middleware == OSG ]; then\n'
996 <        txt += '    cd $RUNTIME_AREA\n'
997 <        txt += '    echo ">>> current directory (RUNTIME_AREA): $RUNTIME_AREA"\n'
998 <        txt += '    echo ">>> Remove working directory: $WORKING_DIR"\n'
999 <        txt += '    /bin/rm -rf $WORKING_DIR\n'
1000 <        txt += '    if [ -d $WORKING_DIR ] ;then\n'
1001 <        txt += '        echo "SET_EXE 60999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after cleanup of WN"\n'
1002 <        txt += '        echo "JOB_EXIT_STATUS = 60999"\n'
1003 <        txt += '        echo "JobExitCode=60999" | tee -a $RUNTIME_AREA/$repo\n'
1004 <        txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
993 >    def wsParseFJR(self):
994 >        """
995 >        Parse the FrameworkJobReport to obtain useful infos
996 >        """
997 >        txt = '\n#Written by cms_cmssw::wsParseFJR\n'
998 >        txt += 'echo ">>> Parse FrameworkJobReport crab_fjr.xml"\n'
999 >        txt += 'if [ -s $RUNTIME_AREA/crab_fjr_$NJob.xml ]; then\n'
1000 >        txt += '    if [ -s $RUNTIME_AREA/parseCrabFjr.py ]; then\n'
1001 >        txt += '        cmd_out=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --dashboard $MonitorID,$MonitorJobID '+self.debugWrap+'`\n'
1002 >        if self.debug_wrapper==1 :
1003 >            txt += '        echo "Result of parsing the FrameworkJobReport crab_fjr.xml: $cmd_out"\n'
1004 >        txt += '        executable_exit_status=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --exitcode`\n'
1005 >        txt += '        if [ $executable_exit_status -eq 50115 ];then\n'
1006 >        txt += '            echo ">>> crab_fjr.xml contents: "\n'
1007 >        txt += '            cat $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
1008 >        txt += '            echo "Wrong FrameworkJobReport --> does not contain useful info. ExitStatus: $executable_exit_status"\n'
1009 >        txt += '        elif [ $executable_exit_status -eq -999 ];then\n'
1010 >        txt += '            echo "ExitStatus from FrameworkJobReport not available. Using exit code of executable from command line."\n'
1011 >        txt += '        else\n'
1012 >        txt += '            echo "Extracted ExitStatus from FrameworkJobReport parsing output: $executable_exit_status"\n'
1013 >        txt += '        fi\n'
1014 >        txt += '    else\n'
1015 >        txt += '        echo "CRAB python script to parse CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1016 >        txt += '    fi\n'
1017 >          #### Patch to check input data reading for CMSSW 1_6_x; hopefully we'll remove it asap
1018 >        txt += '    if [ $executable_exit_status -eq 0 ];then\n'
1019 >        txt += '        echo ">>> Executable succeeded  $executable_exit_status"\n'
1020 >        ## This can no longer work given the changes to the Job arguments
1021 >        """
1022 >        if (self.datasetPath and not (self.dataset_pu or self.useParent==1)) :
1023 >          # VERIFY PROCESSED DATA
1024 >            txt += '        echo ">>> Verify list of processed files:"\n'
1025 >            txt += '        echo $InputFiles |tr -d \'\\\\\' |tr \',\' \'\\n\'|tr -d \'"\' > input-files.txt\n'
1026 >            txt += '        python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --lfn > processed-files.txt\n'
1027 >            txt += '        cat input-files.txt  | sort | uniq > tmp.txt\n'
1028 >            txt += '        mv tmp.txt input-files.txt\n'
1029 >            txt += '        echo "cat input-files.txt"\n'
1030 >            txt += '        echo "----------------------"\n'
1031 >            txt += '        cat input-files.txt\n'
1032 >            txt += '        cat processed-files.txt | sort | uniq > tmp.txt\n'
1033 >            txt += '        mv tmp.txt processed-files.txt\n'
1034 >            txt += '        echo "----------------------"\n'
1035 >            txt += '        echo "cat processed-files.txt"\n'
1036 >            txt += '        echo "----------------------"\n'
1037 >            txt += '        cat processed-files.txt\n'
1038 >            txt += '        echo "----------------------"\n'
1039 >            txt += '        diff -qbB input-files.txt processed-files.txt\n'
1040 >            txt += '        fileverify_status=$?\n'
1041 >            txt += '        if [ $fileverify_status -ne 0 ]; then\n'
1042 >            txt += '            executable_exit_status=30001\n'
1043 >            txt += '            echo "ERROR ==> not all input files processed"\n'
1044 >            txt += '            echo "      ==> list of processed files from crab_fjr.xml differs from list in pset.cfg"\n'
1045 >            txt += '            echo "      ==> diff input-files.txt processed-files.txt"\n'
1046 >            txt += '        fi\n'
1047 >        """
1048          txt += '    fi\n'
1049 +        txt += 'else\n'
1050 +        txt += '    echo "CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1051          txt += 'fi\n'
1052          txt += '\n'
1053 +        txt += 'if [ $executable_exit_status -ne 0 ] && [ $executable_exit_status -ne 50115 ] && [ $executable_exit_status -ne 50117 ] && [ $executable_exit_status -ne 30001 ];then\n'
1054 +        txt += '    echo ">>> Executable failed  $executable_exit_status"\n'
1055 +        txt += '    echo "ExeExitCode=$executable_exit_status" | tee -a $RUNTIME_AREA/$repo\n'
1056 +        txt += '    echo "EXECUTABLE_EXIT_STATUS = $executable_exit_status"\n'
1057 +        txt += '    job_exit_code=$executable_exit_status\n'
1058 +        txt += '    func_exit\n'
1059 +        txt += 'fi\n\n'
1060 +        txt += 'echo "ExeExitCode=$executable_exit_status" | tee -a $RUNTIME_AREA/$repo\n'
1061 +        txt += 'echo "EXECUTABLE_EXIT_STATUS = $executable_exit_status"\n'
1062 +        txt += 'job_exit_code=$executable_exit_status\n'
1063 +
1064          return txt
1065  
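wsParseFJR delegates the exit-code extraction to parseCrabFjr.py, which is not part of this diff; a minimal sketch of that kind of lookup, under the assumption that the framework job report carries the code as an ExitStatus attribute of a FrameworkError element and that -999 means "not found":

    from xml.dom.minidom import parse

    def exit_status_sketch(fjr_path):
        # return the first ExitStatus found in the report, -999 when none is present
        dom = parse(fjr_path)
        for node in dom.getElementsByTagName('FrameworkError'):
            if node.hasAttribute('ExitStatus'):
                return int(node.getAttribute('ExitStatus'))
        return -999
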
1066      def setParam_(self, param, value):
# Line 1356 | Line 1069 | class Cmssw(JobType):
1069      def getParams(self):
1070          return self._params
1071  
1072 <    def uniquelist(self, old):
1360 <        """
1361 <        remove duplicates from a list
1362 <        """
1363 <        nd={}
1364 <        for e in old:
1365 <            nd[e]=0
1366 <        return nd.keys()
1367 <
1368 <
1369 <    def checkOut(self, limit):
1072 >    def outList(self,list=False):
1073          """
1074          build the list of expected output files to be checked
1075          """
1076 <        txt = 'echo ">>> Starting output sandbox limit check :"\n'
1077 <        allOutFiles = ""
1076 >        txt = ''
1077 >        txt += 'echo ">>> list of expected files on output sandbox"\n'
1078          listOutFiles = []
1079 <        txt += 'stdoutFile=`ls *stdout` \n'
1080 <        txt += 'stderrFile=`ls *stderr` \n'
1079 >        stdout = 'CMSSW_$NJob.stdout'
1080 >        stderr = 'CMSSW_$NJob.stderr'
1081 >        if len(self.output_file) <= 0:
1082 >            msg ="WARNING: no output file names have been defined!!\n"
1083 >            msg+="\tno output files will be reported back/staged\n"
1084 >            common.logger.message(msg)
1085          if (self.return_data == 1):
1086 <            for fileOut in (self.output_file+self.output_file_sandbox):
1087 <                allOutFiles = allOutFiles + " " + self.numberFile_(fileOut, '$NJob') + " $stdoutFile $stderrFile"
1088 <        else:            
1089 <            for fileOut in (self.output_file_sandbox):
1090 <                txt += 'echo " '+fileOut+'";\n'
1091 <                allOutFiles = allOutFiles + " " + self.numberFile_(fileOut, '$NJob') + " $stdoutFile $stderrFile"
1092 <        txt += 'echo "OUTPUT files: '+str(allOutFiles)+'";\n'
1093 <        txt += 'ls -gGhrta;\n'
1094 <        txt += 'sum=0;\n'
1095 <        txt += 'for file in '+str(allOutFiles)+' ; do\n'
1096 <        txt += '    if [ -e $file ]; then\n'
1097 <        txt += '        tt=`ls -gGrta $file | awk \'{ print $3 }\'`\n'
1391 <        txt += '        sum=`expr $sum + $tt`\n'
1392 <        txt += '    else\n'
1393 <        txt += '        echo "WARNING: output file $file not found!"\n'
1394 <        txt += '    fi\n'
1395 <        txt += 'done\n'
1396 <        txt += 'echo "Total Output dimension: $sum";\n'
1397 <        txt += 'limit='+str(limit)+';\n'
1398 <        txt += 'echo "OUTPUT FILES LIMIT SET TO: $limit";\n'
1399 <        txt += 'if [ $limit -lt $sum ]; then\n'
1400 <        txt += '    echo "WARNING: output files have to big size - something will be lost;"\n'
1401 <        txt += '    echo "         checking the output file sizes..."\n'
1402 <        txt += '    tot=0;\n'
1403 <        txt += '    for filefile in '+str(allOutFiles)+' ; do\n'
1404 <        txt += '        dimFile=`ls -gGrta $filefile | awk \'{ print $3 }\';`\n'
1405 <        txt += '        tot=`expr $tot + $tt`;\n'
1406 <        txt += '        if [ $limit -lt $dimFile ]; then\n'
1407 <        txt += '            echo "deleting file: $filefile";\n'
1408 <        txt += '            rm -f $filefile\n'
1409 <        txt += '        elif [ $limit -lt $tot ]; then\n'
1410 <        txt += '            echo "deleting file: $filefile";\n'
1411 <        txt += '            rm -f $filefile\n'
1412 <        txt += '        else\n'
1413 <        txt += '            echo "saving file: $filefile"\n'
1414 <        txt += '        fi\n'
1415 <        txt += '    done\n'
1086 >            for file in (self.output_file+self.output_file_sandbox):
1087 >                listOutFiles.append(numberFile(file, '$NJob'))
1088 >            listOutFiles.append(stdout)
1089 >            listOutFiles.append(stderr)
1090 >        else:
1091 >            for file in (self.output_file_sandbox):
1092 >                listOutFiles.append(numberFile(file, '$NJob'))
1093 >            listOutFiles.append(stdout)
1094 >            listOutFiles.append(stderr)
1095 >        txt += 'echo "output files: '+string.join(listOutFiles,' ')+'"\n'
1096 >        txt += 'filesToCheck="'+string.join(listOutFiles,' ')+'"\n'
1097 >        txt += 'export filesToCheck\n'
1098  
1099 <        txt += '    ls -agGhrt;\n'
1418 <        txt += '    echo "WARNING: output files are too big in dimension: can not put in the output_sandbox.";\n'
1419 <        txt += '    echo "JOB_EXIT_STATUS = 70000";\n'
1420 <        txt += '    exit_status=70000;\n'
1421 <        txt += 'else'
1422 <        txt += '    echo "Total Output dimension $sum is fine.";\n'
1423 <        txt += 'fi\n'
1424 <        txt += 'echo "Ending output sandbox limit check"\n'
1099 >        if list : return self.output_file
1100          return txt
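With a hypothetical output_file = ['out.root'] and return_data = 1, the list built by outList expands to the following (sketch, using the numbering convention shown earlier):

    listOutFiles = ['out_$NJob.root', 'CMSSW_$NJob.stdout', 'CMSSW_$NJob.stderr']
    # the wrapper then emits: filesToCheck="out_$NJob.root CMSSW_$NJob.stdout CMSSW_$NJob.stderr"
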
