root/cvsroot/COMP/CRAB/python/cms_cmssw.py

Comparing COMP/CRAB/python/cms_cmssw.py (file contents):
Revision 1.157 by spiga, Sun Feb 17 20:13:00 2008 UTC vs.
Revision 1.365 by spiga, Tue Nov 9 21:10:07 2010 UTC

# Line 1 | Line 1
1 +
2 + __revision__ = "$Id$"
3 + __version__ = "$Revision$"
4 +
5   from JobType import JobType
2 from crab_logger import Logger
6   from crab_exceptions import *
7   from crab_util import *
5 from BlackWhiteListParser import BlackWhiteListParser
8   import common
9 + import re
10   import Scram
11 + from Splitter import JobSplitter
12 + from Downloader import Downloader
13 + try:
14 +    import json
15 + except:
16 +    import simplejson as json
17  
18 + from IMProv.IMProvNode import IMProvNode
19 + from IMProv.IMProvLoader import loadIMProvFile
20   import os, string, glob
21 + from xml.dom import pulldom
22  
23   class Cmssw(JobType):
24 <    def __init__(self, cfg_params, ncjobs):
24 >    def __init__(self, cfg_params, ncjobs,skip_blocks, isNew):
25          JobType.__init__(self, 'CMSSW')
26 <        common.logger.debug(3,'CMSSW::__init__')
27 <
28 <        self.argsList = []
29 <
26 >        common.logger.debug('CMSSW::__init__')
27 >        self.skip_blocks = skip_blocks
28 >        self.argsList = 2
29 >        self.NumEvents=0
30          self._params = {}
31          self.cfg_params = cfg_params
32 <        # init BlackWhiteListParser
33 <        self.blackWhiteListParser = BlackWhiteListParser(cfg_params)
32 >        ### FEDE FOR MULTI ###
33 >        self.var_filter=''
34  
35 <        self.MaxTarBallSize = float(self.cfg_params.get('EDG.maxtarballsize',9.5))
35 >        ### Temporary patch to automatically skip the ISB size check:
36 >        self.server = self.cfg_params.get('CRAB.server_name',None) or \
37 >                      self.cfg_params.get('CRAB.use_server',0)
38 >        self.local  = common.scheduler.name().upper() in ['LSF','CAF','CONDOR','SGE','PBS']
39 >        size = 9.5
40 >        if self.server :
41 >            size = 1000
42 >        elif self.local:
43 >            size = 9999999
44 >        self.MaxTarBallSize = float(self.cfg_params.get('GRID.maxtarballsize',size))
45  
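A minimal sketch of the sandbox-size precedence coded above, with a plain dict standing in for cfg_params (the real object is CRAB's config accessor): server mode raises the cap to 1000 MB, a local scheduler effectively removes it, and GRID.maxtarballsize can override either default.

    def resolve_max_tarball_size(cfg, scheduler_name):
        # Hypothetical helper mirroring the precedence above; cfg is a plain dict here.
        server = cfg.get('CRAB.server_name') or cfg.get('CRAB.use_server', 0)
        local = scheduler_name.upper() in ['LSF', 'CAF', 'CONDOR', 'SGE', 'PBS']
        size = 9.5                  # default limit (MB) for direct GRID submission
        if server:
            size = 1000             # server mode allows a much larger sandbox
        elif local:
            size = 9999999          # local schedulers: effectively no limit
        return float(cfg.get('GRID.maxtarballsize', size))

    # resolve_max_tarball_size({}, 'LSF') -> 9999999.0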
46          # number of jobs requested to be created, limit obj splitting
47          self.ncjobs = ncjobs
48  
28        log = common.logger
29
49          self.scram = Scram.Scram(cfg_params)
50          self.additional_inbox_files = []
51          self.scriptExe = ''
52          self.executable = ''
53          self.executable_arch = self.scram.getArch()
54          self.tgz_name = 'default.tgz'
36        self.additional_tgz_name = 'additional.tgz'
55          self.scriptName = 'CMSSW.sh'
56 <        self.pset = ''      #scrip use case Da
57 <        self.datasetPath = '' #scrip use case Da
56 >        self.pset = ''
57 >        self.datasetPath = ''
58  
59 +        self.tgzNameWithPath = common.work_space.pathForTgz()+self.tgz_name
60          # set FJR file name
61          self.fjrFileName = 'crab_fjr.xml'
62  
63          self.version = self.scram.getSWVersion()
64 +        common.logger.log(10-1,"CMSSW version is: "+str(self.version))
65 +        version_array = self.version.split('_')
66 +        self.CMSSW_major = 0
67 +        self.CMSSW_minor = 0
68 +        self.CMSSW_patch = 0
69 +        try:
70 +            self.CMSSW_major = int(version_array[1])
71 +            self.CMSSW_minor = int(version_array[2])
72 +            self.CMSSW_patch = int(version_array[3])
73 +        except:
74 +            msg = "Cannot parse CMSSW version string: " + self.version + " for major and minor release number!"
75 +            raise CrabException(msg)
76  
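For reference, a standalone sketch of the version parsing done above; it assumes the usual CMSSW_X_Y_Z or CMSSW_X_Y_Z_patchN naming and fails in the same way on anything shorter.

    def parse_cmssw_version(version):
        # 'CMSSW_3_8_4' or 'CMSSW_3_8_4_patch2' -> (3, 8, 4)
        parts = version.split('_')
        try:
            return int(parts[1]), int(parts[2]), int(parts[3])
        except (IndexError, ValueError):
            raise ValueError("Cannot parse CMSSW version string: %s" % version)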
77 <        #
78 <        # Try to block creation in case of arch/version mismatch
48 <        #
49 <
50 <        a = string.split(self.version, "_")
51 <
52 <        if int(a[1]) == 1 and (int(a[2]) < 5 and self.executable_arch.find('slc4') == 0):
53 <            msg = "Warning: You are using %s version of CMSSW  with %s architecture. \n--> Did you compile your libraries with SLC3? Otherwise you can find some problems running on SLC4 Grid nodes.\n"%(self.version, self.executable_arch)
54 <            common.logger.message(msg)
55 <        if int(a[1]) == 1 and (int(a[2]) >= 5 and self.executable_arch.find('slc3') == 0):
56 <            msg = "Error: CMS does not support %s with %s architecture"%(self.version, self.executable_arch)
77 >        if self.CMSSW_major < 2 or (self.CMSSW_major == 2 and self.CMSSW_minor < 1):
78 >            msg = "CRAB supports CMSSW >= 2_1_x only. Use an older CRAB version."
79              raise CrabException(msg)
80 +            """
81 +            As CMSSW versions are dropped we can drop more code:
82 +            2.x dropped: drop check for lumi range setting
83 +            """
84 +        self.checkCMSSWVersion()
85 +        ### collect Data cards
86  
87 <        common.taskDB.setDict('codeVersion',self.version)
88 <        self.setParam_('application', self.version)
87 >        ### Temporary: added to remove input file control in the case of PU
88 >        self.dataset_pu = cfg_params.get('CMSSW.dataset_pu', None)
89  
90 <        ### collect Data cards
90 >        tmp =  cfg_params['CMSSW.datasetpath']
91 >        common.logger.log(10-1, "CMSSW::CMSSW(): datasetPath = "+tmp)
92  
93 <        if not cfg_params.has_key('CMSSW.datasetpath'):
93 >        if tmp =='':
94              msg = "Error: datasetpath not defined "
95              raise CrabException(msg)
96 <        tmp =  cfg_params['CMSSW.datasetpath']
68 <        log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
69 <        if string.lower(tmp)=='none':
96 >        elif string.lower(tmp)=='none':
97              self.datasetPath = None
98              self.selectNoInput = 1
99 +            self.primaryDataset = 'null'
100          else:
101              self.datasetPath = tmp
102              self.selectNoInput = 0
103 +            ll = len(self.datasetPath.split("/"))
104 +            if (ll < 4):
105 +                msg = 'Your datasetpath has an invalid format ' + self.datasetPath + '\n'
106 +                msg += 'Expected a path in format /PRIMARY/PROCESSED/TIER1-TIER2 or /PRIMARY/PROCESSED/TIER/METHOD for ADS'
107 +                raise CrabException(msg)
108 +            self.primaryDataset = self.datasetPath.split("/")[1]
109 +            self.dataTier = self.datasetPath.split("/")[2]
110  
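To make the indexing above concrete, here is the split for a hypothetical dataset path (the dataset name is invented for illustration, not taken from the source):

    path = '/MinBias/Summer09-STARTUP3X_V8D-v1/GEN-SIM-RECO'
    fields = path.split('/')  # ['', 'MinBias', 'Summer09-STARTUP3X_V8D-v1', 'GEN-SIM-RECO']
    len(fields)               # 4 -> passes the ll < 4 check; 5 or more fields marks an ADS
    fields[1]                 # 'MinBias'                    -> stored as self.primaryDataset
    fields[2]                 # 'Summer09-STARTUP3X_V8D-v1'  -> stored as self.dataTier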
111 <        # ML monitoring
112 <        # split dataset path style: /PreProdR3Minbias/SIM/GEN-SIM
113 <        if not self.datasetPath:
114 <            self.setParam_('dataset', 'None')
115 <            self.setParam_('owner', 'None')
116 <        else:
117 <            ## SL what is supposed to fail here?
118 <            try:
119 <                datasetpath_split = self.datasetPath.split("/")
120 <                # standard style
121 <                self.setParam_('datasetFull', self.datasetPath)
122 <                self.setParam_('dataset', datasetpath_split[1])
123 <                self.setParam_('owner', datasetpath_split[2])
124 <            except:
125 <                self.setParam_('dataset', self.datasetPath)
126 <                self.setParam_('owner', self.datasetPath)
92 <
93 <        self.setParam_('taskId', common.taskDB.dict('taskId'))
94 <
95 <        self.dataTiers = []
111 >        # Analysis dataset is primary/processed/tier/definition
112 >        self.ads = False
113 >        if self.datasetPath:
114 >            self.ads = len(self.datasetPath.split("/")) > 4
115 >        self.lumiMask = self.cfg_params.get('CMSSW.lumi_mask',None)
116 >        self.lumiParams = self.cfg_params.get('CMSSW.total_number_of_lumis',None) or \
117 >                          self.cfg_params.get('CMSSW.lumis_per_job',None)
118 >
119 >        # FUTURE: Can remove this check
120 >        if self.ads and self.CMSSW_major < 3:
121 >            common.logger.info('Warning: Analysis dataset support is incomplete in CMSSW 2_x.')
122 >            common.logger.info('  Only file level, not lumi level, granularity is supported.')
123 >
124 >        self.debugWrap=''
125 >        self.debug_wrapper = int(cfg_params.get('USER.debug_wrapper',0))
126 >        if self.debug_wrapper == 1: self.debugWrap='--debug'
127  
128          ## now the application
129 +        self.managedGenerators = ['madgraph', 'comphep', 'lhe']
130 +        self.generator = cfg_params.get('CMSSW.generator','pythia').lower()
131          self.executable = cfg_params.get('CMSSW.executable','cmsRun')
132 <        self.setParam_('exe', self.executable)
100 <        log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
132 >        common.logger.log(10-1, "CMSSW::CMSSW(): executable = "+self.executable)
133  
134          if not cfg_params.has_key('CMSSW.pset'):
135              raise CrabException("PSet file missing. Cannot run cmsRun ")
136          self.pset = cfg_params['CMSSW.pset']
137 <        log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
137 >        common.logger.log(10-1, "Cmssw::Cmssw(): PSet file = "+self.pset)
138          if self.pset.lower() != 'none' :
139              if (not os.path.exists(self.pset)):
140                  raise CrabException("User defined PSet file "+self.pset+" does not exist")
# Line 117 | Line 149 | class Cmssw(JobType):
149          self.output_file_sandbox.append(self.fjrFileName)
150  
151          # other output files to be returned via sandbox or copied to SE
152 +        outfileflag = False
153          self.output_file = []
154          tmp = cfg_params.get('CMSSW.output_file',None)
155          if tmp :
156 <            tmpOutFiles = string.split(tmp,',')
157 <            log.debug(7, 'cmssw::cmssw(): output files '+str(tmpOutFiles))
125 <            for tmp in tmpOutFiles:
126 <                tmp=string.strip(tmp)
127 <                self.output_file.append(tmp)
128 <                pass
129 <        else:
130 <            log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
131 <        pass
156 >            self.output_file = [x.strip() for x in tmp.split(',')]
157 >            outfileflag = True #output found
158  
133        # script_exe file as additional file in inputSandbox
159          self.scriptExe = cfg_params.get('USER.script_exe',None)
160          if self.scriptExe :
161 <           if not os.path.isfile(self.scriptExe):
162 <              msg ="ERROR. file "+self.scriptExe+" not found"
163 <              raise CrabException(msg)
164 <           self.additional_inbox_files.append(string.strip(self.scriptExe))
161 >            if not os.path.isfile(self.scriptExe):
162 >                msg ="ERROR. file "+self.scriptExe+" not found"
163 >                raise CrabException(msg)
164 >            self.additional_inbox_files.append(string.strip(self.scriptExe))
165 >
166 >        self.AdditionalArgs = cfg_params.get('USER.script_arguments',None)
167 >        if self.AdditionalArgs : self.AdditionalArgs = string.replace(self.AdditionalArgs,',',' ')
168  
141        #CarlosDaniele
169          if self.datasetPath == None and self.pset == None and self.scriptExe == '' :
170 <           msg ="Error. script_exe  not defined"
171 <           raise CrabException(msg)
170 >            msg ="Error. script_exe  not defined"
171 >            raise CrabException(msg)
172 >
173 >        # use parent files...
174 >        self.useParent = int(self.cfg_params.get('CMSSW.use_parent',0))
175  
176          ## additional input files
177          if cfg_params.has_key('USER.additional_input_files'):
# Line 161 | Line 191 | class Cmssw(JobType):
191                      if not os.path.exists(file):
192                          raise CrabException("Additional input file not found: "+file)
193                      pass
164                    # fname = string.split(file, '/')[-1]
165                    # storedFile = common.work_space.pathForTgz()+'share/'+fname
166                    # shutil.copyfile(file, storedFile)
194                      self.additional_inbox_files.append(string.strip(file))
195                  pass
196              pass
197 <            common.logger.debug(5,"Additional input files: "+str(self.additional_inbox_files))
197 >            common.logger.debug("Additional input files: "+str(self.additional_inbox_files))
198          pass
199  
173        ## Events per job
174        if cfg_params.has_key('CMSSW.events_per_job'):
175            self.eventsPerJob =int( cfg_params['CMSSW.events_per_job'])
176            self.selectEventsPerJob = 1
177        else:
178            self.eventsPerJob = -1
179            self.selectEventsPerJob = 0
180
181        ## number of jobs
182        if cfg_params.has_key('CMSSW.number_of_jobs'):
183            self.theNumberOfJobs =int( cfg_params['CMSSW.number_of_jobs'])
184            self.selectNumberOfJobs = 1
185        else:
186            self.theNumberOfJobs = 0
187            self.selectNumberOfJobs = 0
188
189        if cfg_params.has_key('CMSSW.total_number_of_events'):
190            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
191            self.selectTotalNumberEvents = 1
192        else:
193            self.total_number_of_events = 0
194            self.selectTotalNumberEvents = 0
195
196        if self.pset != None: #CarlosDaniele
197             if ( (self.selectTotalNumberEvents + self.selectEventsPerJob + self.selectNumberOfJobs) != 2 ):
198                 msg = 'Must define exactly two of total_number_of_events, events_per_job, or number_of_jobs.'
199                 raise CrabException(msg)
200        else:
201             if (self.selectNumberOfJobs == 0):
202                 msg = 'Must specify  number_of_jobs.'
203                 raise CrabException(msg)
204
205        ## source seed for pythia
206        self.sourceSeed = cfg_params.get('CMSSW.pythia_seed',None)
207
208        self.sourceSeedVtx = cfg_params.get('CMSSW.vtx_seed',None)
209
210        self.sourceSeedG4 = cfg_params.get('CMSSW.g4_seed',None)
200  
201 <        self.sourceSeedMix = cfg_params.get('CMSSW.mix_seed',None)
202 <
203 <        self.firstRun = cfg_params.get('CMSSW.first_run',None)
204 <
205 <        if self.pset != None: #CarlosDaniele
206 <            import PsetManipulator as pp
207 <            PsetEdit = pp.PsetManipulator(self.pset) #Daniele Pset
208 <
209 <        # Copy/return
201 >        ## New method of dealing with seeds
202 >        self.incrementSeeds = []
203 >        self.preserveSeeds = []
204 >        if cfg_params.has_key('CMSSW.preserve_seeds'):
205 >            tmpList = cfg_params['CMSSW.preserve_seeds'].split(',')
206 >            for tmp in tmpList:
207 >                tmp.strip()
208 >                self.preserveSeeds.append(tmp)
209 >        if cfg_params.has_key('CMSSW.increment_seeds'):
210 >            tmpList = cfg_params['CMSSW.increment_seeds'].split(',')
211 >            for tmp in tmpList:
212 >                tmp.strip()
213 >                self.incrementSeeds.append(tmp)
214  
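Note that str.strip() returns a new string and leaves tmp untouched, so the loops above append the tokens with any surrounding whitespace still in place. A whitespace-safe sketch (not the code in this revision; the option values shown are hypothetical):

    def parse_seed_list(value):
        # 'sourceSeed, VtxSmeared ' -> ['sourceSeed', 'VtxSmeared']
        return [name.strip() for name in value.split(',') if name.strip()]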
215 +        # Copy/return/publish
216          self.copy_data = int(cfg_params.get('USER.copy_data',0))
217          self.return_data = int(cfg_params.get('USER.return_data',0))
218 +        self.publish_data = int(cfg_params.get('USER.publish_data',0))
219 +        if (self.publish_data == 1):
220 +            if not cfg_params.has_key('USER.publish_data_name'):
221 +                raise CrabException('Cannot publish output data, because you did not specify USER.publish_data_name parameter in the crab.cfg file')
222 +            else:
223 +                self.processedDataset = cfg_params['USER.publish_data_name']
224  
225 +        self.conf = {}
226 +        self.conf['pubdata'] = None
227 +        # number of jobs requested to be created, limit obj splitting DD
228          #DBSDLS-start
229          ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
230          self.maxEvents=0  # max events available   ( --> check the requested nb. of evts in Creator.py)
# Line 230 | Line 233 | class Cmssw(JobType):
233          ## Perform the data location and discovery (based on DBS/DLS)
234          ## SL: Don't if NONE is specified as input (pythia use case)
235          blockSites = {}
236 <        if self.datasetPath:
237 <            blockSites = self.DataDiscoveryAndLocation(cfg_params)
238 <        #DBSDLS-end
236 > #wmbs
237 >        self.automation = int(self.cfg_params.get('WMBS.automation',0))
238 >        if self.automation == 0:
239 >            if self.datasetPath:
240 >                blockSites = self.DataDiscoveryAndLocation(cfg_params)
241 >            #DBSDLS-end
242 >            self.conf['blockSites']=blockSites
243 >
244 >            ## Select Splitting
245 >            splitByRun = int(cfg_params.get('CMSSW.split_by_run',0))
246 >
247 >            if self.selectNoInput:
248 >                if self.pset == None:
249 >                    self.algo = 'ForScript'
250 >                else:
251 >                    self.algo = 'NoInput'
252 >                    self.conf['managedGenerators']=self.managedGenerators
253 >                    self.conf['generator']=self.generator
254 >            elif self.ads or self.lumiMask or self.lumiParams:
255 >                self.algo = 'LumiBased'
256 >                if splitByRun:
257 >                    msg = "Cannot combine split by run with lumi_mask, ADS, " \
258 >                          "or lumis_per_job. Use split by lumi mode instead."
259 >                    raise CrabException(msg)
260 >
261 >            elif splitByRun ==1:
262 >                self.algo = 'RunBased'
263 >            else:
264 >                self.algo = 'EventBased'
265 >            common.logger.debug("Job splitting method: %s" % self.algo)
266  
267 <        self.tgzNameWithPath = self.getTarBall(self.executable)
267 >            splitter = JobSplitter(self.cfg_params,self.conf)
268 >            self.dict = splitter.Algos()[self.algo]()
269  
270 <        ## Select Splitting
271 <        if self.selectNoInput:
272 <            if self.pset == None: #CarlosDaniele
273 <                self.jobSplittingForScript()
270 >        self.argsFile= '%s/arguments.xml'%common.work_space.shareDir()
271 >        self.rootArgsFilename= 'arguments'
272 >        # modify Pset only the first time
273 >        if isNew:
274 >            if self.pset != None: self.ModifyPset()
275 >
276 >            ## Prepare inputSandbox TarBall (only the first time)
277 >            self.tarNameWithPath = self.getTarBall(self.executable)
278 >
279 >
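The splitter result stored in self.dict is consumed later by split() and numberOfJobs(); judging from those call sites it carries at least the keys sketched below (all values are invented for illustration).

    # Hypothetical shape of the structure returned by JobSplitter.Algos()[self.algo]():
    example_dict = {
        'njobs': 2,                            # read back by numberOfJobs()
        'params': ['MaxEvents'],               # argument names, zipped per job in split()
        'args': [['100'], ['100']],            # one list of argument values per job
        'jobDestination': [['T2_IT_Bari'], ['T2_DE_DESY']],  # one site list per job
    }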
280 >    def ModifyPset(self):
281 >        import PsetManipulator as pp
282 >
283 >        # If pycfg_params set, fake out the config script
284 >        # to make it think it was called with those args
285 >        pycfg_params = self.cfg_params.get('CMSSW.pycfg_params',None)
286 >        if pycfg_params:
287 >            trueArgv = sys.argv
288 >            sys.argv = [self.pset]
289 >            sys.argv.extend(pycfg_params.split(' '))
290 >        PsetEdit = pp.PsetManipulator(self.pset)
291 >        if pycfg_params: # Restore original sys.argv
292 >            sys.argv = trueArgv
293 >
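The same save-and-restore of sys.argv could be packaged as a context manager; a sketch, not part of this revision:

    import sys
    from contextlib import contextmanager

    @contextmanager
    def fake_argv(argv):
        # Temporarily replace sys.argv so an imported python config sees the
        # arguments it expects, then restore the original list no matter what.
        saved = sys.argv
        sys.argv = list(argv)
        try:
            yield
        finally:
            sys.argv = saved

    # with fake_argv(['pset.py', 'myOption=1']):   # hypothetical usage
    #     load_config('pset.py')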
294 >        try:
295 >            # Add FrameworkJobReport to parameter-set, set max events.
296 >            # Reset later for data jobs by writeCFG which does all modifications
297 >            PsetEdit.maxEvent(1)
298 >            PsetEdit.skipEvent(0)
299 >            PsetEdit.psetWriter(self.configFilename())
300 >            ## If present, add TFileService to output files
301 >            if not int(self.cfg_params.get('CMSSW.skip_tfileservice_output',0)):
302 >                tfsOutput = PsetEdit.getTFileService()
303 >                if tfsOutput:
304 >                    if tfsOutput in self.output_file:
305 >                        common.logger.debug("Output from TFileService "+tfsOutput+" already in output files")
306 >                    else:
307 >                        outfileflag = True #output found
308 >                        self.output_file.append(tfsOutput)
309 >                        common.logger.info("Adding "+tfsOutput+" (from TFileService) to list of output files")
310 >                    pass
311 >                pass
312 >
313 >            # If requested, add PoolOutputModule to output files
314 >            ### FEDE FOR MULTI ###
315 >            #edmOutput = PsetEdit.getPoolOutputModule()
316 >            edmOutputDict = PsetEdit.getPoolOutputModule()
317 >            common.logger.debug("(test) edmOutputDict = "+str(edmOutputDict))
318 >            filter_dict = {}
319 >            for key in edmOutputDict.keys():
320 >                filter_dict[key]=edmOutputDict[key]['dataset']
321 >            common.logger.debug("(test) filter_dict for multi =  "+str(filter_dict))
322 >
323 >            #### in CMSSW.sh: export var_filter
324 >
325 >            self.var_filter = json.dumps(filter_dict)
326 >            common.logger.debug("(test) var_filter for multi =  "+self.var_filter)
327 >
328 >            edmOutput = edmOutputDict.keys()
329 >            if int(self.cfg_params.get('CMSSW.get_edm_output',0)):
330 >                if edmOutput:
331 >                    for outputFile in edmOutput:
332 >                        if outputFile in self.output_file:
333 >                            common.logger.debug("Output from PoolOutputModule "+outputFile+" already in output files")
334 >                        else:
335 >                            self.output_file.append(outputFile)
336 >                            common.logger.info("Adding "+outputFile+" (from PoolOutputModule) to list of output files")
337 >            # not requested, check anyhow to avoid accidental T2 overload
338              else:
339 <                self.jobSplittingNoInput()
340 <        else:
341 <            self.jobSplittingByBlocks(blockSites)
339 >                if edmOutput:
340 >                    missedFiles = []
341 >                    for outputFile in edmOutput:
342 >                        if outputFile not in self.output_file:
343 >                            missedFiles.append(outputFile)
344 >                    if missedFiles:
345 >                        msg  = "ERROR: PoolOutputModule(s) are present in your ParameterSet %s \n"%self.pset
346 >                        msg += "    but the file(s) produced ( %s ) are not in the list of output files\n" % ', '.join(missedFiles)
347 >                        msg += "WARNING: please remove them. If you want to keep them, add the file(s) to output_files or use CMSSW.get_edm_output = 1\n"
348 >                        if int(self.cfg_params.get('CMSSW.ignore_edm_output',0)):
349 >                            msg += "    CMSSW.ignore_edm_output==1 : Hope you know what you are doing...\n"
350 >                            common.logger.info(msg)
351 >                        else :
352 >                            raise CrabException(msg)
353  
354 <        # modify Pset
355 <        if self.pset != None: #CarlosDaniele
356 <            try:
357 <                if (self.datasetPath): # standard job
358 <                    # allow to processa a fraction of events in a file
359 <                    PsetEdit.inputModule("INPUTFILE")
360 <                    PsetEdit.maxEvent(0)
361 <                    PsetEdit.skipEvent(0)
362 <                else:  # pythia like job
363 <                    PsetEdit.maxEvent(self.eventsPerJob)
364 <                    if (self.firstRun):
365 <                        PsetEdit.pythiaFirstRun(0)  #First Run
366 <                    if (self.sourceSeed) :
261 <                        PsetEdit.pythiaSeed(0)
262 <                        if (self.sourceSeedVtx) :
263 <                            PsetEdit.vtxSeed(0)
264 <                        if (self.sourceSeedG4) :
265 <                            PsetEdit.g4Seed(0)
266 <                        if (self.sourceSeedMix) :
267 <                            PsetEdit.mixSeed(0)
268 <                # add FrameworkJobReport to parameter-set
269 <                PsetEdit.addCrabFJR(self.fjrFileName)
270 <                PsetEdit.psetWriter(self.configFilename())
271 <            except:
272 <                msg='Error while manipuliating ParameterSet: exiting...'
354 >            if (PsetEdit.getBadFilesSetting()):
355 >                msg = "WARNING: You have set skipBadFiles to True. This will continue processing on some errors and you may not be notified."
356 >                common.logger.info(msg)
357 >
358 >        except CrabException, msg:
359 >            common.logger.info(str(msg))
360 >            msg='Error while manipulating ParameterSet (see previous message, if any): exiting...'
361 >            raise CrabException(msg)
362 >
363 >        valid = re.compile('^[\w\.\-]+$')
364 >        for fileName in self.output_file:
365 >            if not valid.match(fileName):
366 >                msg = "The file %s may only contain alphanumeric characters and -, _, ." % fileName
367                  raise CrabException(msg)
368  
369 +
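For instance, with the pattern compiled above:

    import re
    valid = re.compile(r'^[\w\.\-]+$')
    bool(valid.match('histograms_1.root'))   # True : letters, digits, '_', '.', '-' are allowed
    bool(valid.match('out put.root'))        # False: spaces are rejected
    bool(valid.match('ntuple/evt.root'))     # False: path separators are rejected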
370      def DataDiscoveryAndLocation(self, cfg_params):
371  
372          import DataDiscovery
373          import DataLocation
374 <        common.logger.debug(10,"CMSSW::DataDiscoveryAndLocation()")
374 >        common.logger.log(10-1,"CMSSW::DataDiscoveryAndLocation()")
375  
376          datasetPath=self.datasetPath
377  
378          ## Contact the DBS
379 <        common.logger.message("Contacting Data Discovery Services ...")
379 >        common.logger.info("Contacting Data Discovery Services ...")
380          try:
381 <            self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params)
381 >            self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params,self.skip_blocks)
382              self.pubdata.fetchDBSInfo()
383  
384          except DataDiscovery.NotExistingDatasetError, ex :
# Line 297 | Line 392 | class Cmssw(JobType):
392              raise CrabException(msg)
393  
394          self.filesbyblock=self.pubdata.getFiles()
395 <        self.eventsbyblock=self.pubdata.getEventsPerBlock()
301 <        self.eventsbyfile=self.pubdata.getEventsPerFile()
395 >        self.conf['pubdata']=self.pubdata
396  
397          ## get max number of events
398 <        self.maxEvents=self.pubdata.getMaxEvents() ##  self.maxEvents used in Creator.py
398 >        self.maxEvents=self.pubdata.getMaxEvents()
399  
400          ## Contact the DLS and build a list of sites hosting the fileblocks
401          try:
402              dataloc=DataLocation.DataLocation(self.filesbyblock.keys(),cfg_params)
403              dataloc.fetchDLSInfo()
404 +
405          except DataLocation.DataLocationError , ex:
406              msg = 'ERROR ***: failed Data Location in DLS \n %s '%ex.getErrorMessage()
407              raise CrabException(msg)
408  
409  
410 <        sites = dataloc.getSites()
410 >        unsorted_sites = dataloc.getSites()
411 >        sites = self.filesbyblock.fromkeys(self.filesbyblock,'')
412 >        for lfn in self.filesbyblock.keys():
413 >            if unsorted_sites.has_key(lfn):
414 >                sites[lfn]=unsorted_sites[lfn]
415 >            else:
416 >                sites[lfn]=[]
417 >
418 >        if len(sites)==0:
419 >            msg = 'ERROR ***: no location for any of the blocks of this dataset: \n\t %s \n'%datasetPath
420 >            msg += "\tMaybe the dataset is located only at T1's (or at T0), where analysis jobs are not allowed\n"
421 >            msg += "\tPlease check DataDiscovery page https://cmsweb.cern.ch/dbs_discovery/\n"
422 >            raise CrabException(msg)
423 >
424          allSites = []
425          listSites = sites.values()
426          for listSite in listSites:
427              for oneSite in listSite:
428                  allSites.append(oneSite)
429 <        allSites = self.uniquelist(allSites)
429 >        [allSites.append(it) for it in allSites if not allSites.count(it)]
430  
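A side note on the replacement line above: allSites.count(it) is at least 1 for every element already in the list, so the comprehension never appends anything and duplicates are not removed (the old code called self.uniquelist). If deduplication is still wanted, an order-preserving one-liner could look like this (a sketch, not the code in this revision):

    seen = set()
    allSites = [site for site in allSites if site not in seen and not seen.add(site)]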
323        # screen output
324        common.logger.message("Requested dataset: " + datasetPath + " has " + str(self.maxEvents) + " events in " + str(len(self.filesbyblock.keys())) + " blocks.\n")
431  
432 <        return sites
433 <
434 <    def setArgsList(self, argsList):
435 <        self.argsList = argsList
330 <
331 <    def jobSplittingByBlocks(self, blockSites):
332 <        """
333 <        Perform job splitting. Jobs run over an integer number of files
334 <        and no more than one block.
335 <        ARGUMENT: blockSites: dictionary with blocks as keys and list of host sites as values
336 <        REQUIRES: self.selectTotalNumberEvents, self.selectEventsPerJob, self.selectNumberofJobs,
337 <                  self.total_number_of_events, self.eventsPerJob, self.theNumberOfJobs,
338 <                  self.maxEvents, self.filesbyblock
339 <        SETS: self.jobDestination - Site destination(s) for each job (a list of lists)
340 <              self.total_number_of_jobs - Total # of jobs
341 <              self.list_of_args - File(s) job will run on (a list of lists)
342 <        """
343 <
344 <        # ---- Handle the possible job splitting configurations ---- #
345 <        if (self.selectTotalNumberEvents):
346 <            totalEventsRequested = self.total_number_of_events
347 <        if (self.selectEventsPerJob):
348 <            eventsPerJobRequested = self.eventsPerJob
349 <            if (self.selectNumberOfJobs):
350 <                totalEventsRequested = self.theNumberOfJobs * self.eventsPerJob
351 <
352 <        # If user requested all the events in the dataset
353 <        if (totalEventsRequested == -1):
354 <            eventsRemaining=self.maxEvents
355 <        # If user requested more events than are in the dataset
356 <        elif (totalEventsRequested > self.maxEvents):
357 <            eventsRemaining = self.maxEvents
358 <            common.logger.message("Requested "+str(self.total_number_of_events)+ " events, but only "+str(self.maxEvents)+" events are available.")
359 <        # If user requested less events than are in the dataset
432 >        # screen output
433 >        if self.ads or self.lumiMask:
434 >            common.logger.info("Requested (A)DS %s has %s block(s)." %
435 >                               (datasetPath, len(self.filesbyblock.keys())))
436          else:
437 <            eventsRemaining = totalEventsRequested
438 <
439 <        # If user requested more events per job than are in the dataset
364 <        if (self.selectEventsPerJob and eventsPerJobRequested > self.maxEvents):
365 <            eventsPerJobRequested = self.maxEvents
366 <
367 <        # For user info at end
368 <        totalEventCount = 0
369 <
370 <        if (self.selectTotalNumberEvents and self.selectNumberOfJobs):
371 <            eventsPerJobRequested = int(eventsRemaining/self.theNumberOfJobs)
372 <
373 <        if (self.selectNumberOfJobs):
374 <            common.logger.message("May not create the exact number_of_jobs requested.")
437 >            common.logger.info("Requested dataset: " + datasetPath + \
438 >                " has " + str(self.maxEvents) + " events in " + \
439 >                str(len(self.filesbyblock.keys())) + " blocks.\n")
440  
441 <        if ( self.ncjobs == 'all' ) :
377 <            totalNumberOfJobs = 999999999
378 <        else :
379 <            totalNumberOfJobs = self.ncjobs
380 <
381 <
382 <        blocks = blockSites.keys()
383 <        blockCount = 0
384 <        # Backup variable in case self.maxEvents counted events in a non-included block
385 <        numBlocksInDataset = len(blocks)
386 <
387 <        jobCount = 0
388 <        list_of_lists = []
389 <
390 <        # list tracking which jobs are in which jobs belong to which block
391 <        jobsOfBlock = {}
392 <
393 <        # ---- Iterate over the blocks in the dataset until ---- #
394 <        # ---- we've met the requested total # of events    ---- #
395 <        while ( (eventsRemaining > 0) and (blockCount < numBlocksInDataset) and (jobCount < totalNumberOfJobs)):
396 <            block = blocks[blockCount]
397 <            blockCount += 1
398 <            if block not in jobsOfBlock.keys() :
399 <                jobsOfBlock[block] = []
400 <
401 <            if self.eventsbyblock.has_key(block) :
402 <                numEventsInBlock = self.eventsbyblock[block]
403 <                common.logger.debug(5,'Events in Block File '+str(numEventsInBlock))
404 <
405 <                files = self.filesbyblock[block]
406 <                numFilesInBlock = len(files)
407 <                if (numFilesInBlock <= 0):
408 <                    continue
409 <                fileCount = 0
410 <
411 <                # ---- New block => New job ---- #
412 <                parString = ""
413 <                # counter for number of events in files currently worked on
414 <                filesEventCount = 0
415 <                # flag if next while loop should touch new file
416 <                newFile = 1
417 <                # job event counter
418 <                jobSkipEventCount = 0
419 <
420 <                # ---- Iterate over the files in the block until we've met the requested ---- #
421 <                # ---- total # of events or we've gone over all the files in this block  ---- #
422 <                while ( (eventsRemaining > 0) and (fileCount < numFilesInBlock) and (jobCount < totalNumberOfJobs) ):
423 <                    file = files[fileCount]
424 <                    if newFile :
425 <                        try:
426 <                            numEventsInFile = self.eventsbyfile[file]
427 <                            common.logger.debug(6, "File "+str(file)+" has "+str(numEventsInFile)+" events")
428 <                            # increase filesEventCount
429 <                            filesEventCount += numEventsInFile
430 <                            # Add file to current job
431 <                            parString += '\\\"' + file + '\\\"\,'
432 <                            newFile = 0
433 <                        except KeyError:
434 <                            common.logger.message("File "+str(file)+" has unknown number of events: skipping")
435 <
436 <
437 <                    # if less events in file remain than eventsPerJobRequested
438 <                    if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested ) :
439 <                        # if last file in block
440 <                        if ( fileCount == numFilesInBlock-1 ) :
441 <                            # end job using last file, use remaining events in block
442 <                            # close job and touch new file
443 <                            fullString = parString[:-2]
444 <                            list_of_lists.append([fullString,str(-1),str(jobSkipEventCount)])
445 <                            common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(filesEventCount - jobSkipEventCount)+" events (last file in block).")
446 <                            self.jobDestination.append(blockSites[block])
447 <                            common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
448 <                            # fill jobs of block dictionary
449 <                            jobsOfBlock[block].append(jobCount+1)
450 <                            # reset counter
451 <                            jobCount = jobCount + 1
452 <                            totalEventCount = totalEventCount + filesEventCount - jobSkipEventCount
453 <                            eventsRemaining = eventsRemaining - filesEventCount + jobSkipEventCount
454 <                            jobSkipEventCount = 0
455 <                            # reset file
456 <                            parString = ""
457 <                            filesEventCount = 0
458 <                            newFile = 1
459 <                            fileCount += 1
460 <                        else :
461 <                            # go to next file
462 <                            newFile = 1
463 <                            fileCount += 1
464 <                    # if events in file equal to eventsPerJobRequested
465 <                    elif ( filesEventCount - jobSkipEventCount == eventsPerJobRequested ) :
466 <                        # close job and touch new file
467 <                        fullString = parString[:-2]
468 <                        list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
469 <                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
470 <                        self.jobDestination.append(blockSites[block])
471 <                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
472 <                        jobsOfBlock[block].append(jobCount+1)
473 <                        # reset counter
474 <                        jobCount = jobCount + 1
475 <                        totalEventCount = totalEventCount + eventsPerJobRequested
476 <                        eventsRemaining = eventsRemaining - eventsPerJobRequested
477 <                        jobSkipEventCount = 0
478 <                        # reset file
479 <                        parString = ""
480 <                        filesEventCount = 0
481 <                        newFile = 1
482 <                        fileCount += 1
441 >        return sites
442  
484                    # if more events in file remain than eventsPerJobRequested
485                    else :
486                        # close job but don't touch new file
487                        fullString = parString[:-2]
488                        list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
489                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
490                        self.jobDestination.append(blockSites[block])
491                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
492                        jobsOfBlock[block].append(jobCount+1)
493                        # increase counter
494                        jobCount = jobCount + 1
495                        totalEventCount = totalEventCount + eventsPerJobRequested
496                        eventsRemaining = eventsRemaining - eventsPerJobRequested
497                        # calculate skip events for last file
498                        # use filesEventCount (contains several files), jobSkipEventCount and eventsPerJobRequest
499                        jobSkipEventCount = eventsPerJobRequested - (filesEventCount - jobSkipEventCount - self.eventsbyfile[file])
500                        # remove all but the last file
501                        filesEventCount = self.eventsbyfile[file]
502                        parString = ""
503                        parString += '\\\"' + file + '\\\"\,'
504                    pass # END if
505                pass # END while (iterate over files in the block)
506        pass # END while (iterate over blocks in the dataset)
507        self.ncjobs = self.total_number_of_jobs = jobCount
508        if (eventsRemaining > 0 and jobCount < totalNumberOfJobs ):
509            common.logger.message("Could not run on all requested events because some blocks not hosted at allowed sites.")
510        common.logger.message(str(jobCount)+" job(s) can run on "+str(totalEventCount)+" events.\n")
443  
444 <        # screen output
513 <        screenOutput = "List of jobs and available destination sites:\n\n"
444 >    def split(self, jobParams,firstJobID):
445  
446 <        # keep trace of block with no sites to print a warning at the end
447 <        noSiteBlock = []
448 <        bloskNoSite = []
449 <
450 <        blockCounter = 0
451 <        for block in blocks:
452 <            if block in jobsOfBlock.keys() :
453 <                blockCounter += 1
523 <                screenOutput += "Block %5i: jobs %20s: sites: %s\n" % (blockCounter,spanRanges(jobsOfBlock[block]),','.join(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)))
524 <                if len(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)) == 0:
525 <                    noSiteBlock.append( spanRanges(jobsOfBlock[block]) )
526 <                    bloskNoSite.append( blockCounter )
527 <
528 <        common.logger.message(screenOutput)
529 <        if len(noSiteBlock) > 0 and len(bloskNoSite) > 0:
530 <            msg = 'WARNING: No sites are hosting any part of data for block:\n                '
531 <            virgola = ""
532 <            if len(bloskNoSite) > 1:
533 <                virgola = ","
534 <            for block in bloskNoSite:
535 <                msg += ' ' + str(block) + virgola
536 <            msg += '\n               Related jobs:\n                 '
537 <            virgola = ""
538 <            if len(noSiteBlock) > 1:
539 <                virgola = ","
540 <            for range_jobs in noSiteBlock:
541 <                msg += str(range_jobs) + virgola
542 <            msg += '\n               will not be submitted and this block of data can not be analyzed!\n'
543 <            if self.cfg_params.has_key('EDG.se_white_list'):
544 <                msg += 'WARNING: SE White List: '+self.cfg_params['EDG.se_white_list']+'\n'
545 <                msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
546 <                msg += 'Please check if the dataset is available at this site!)\n'
547 <            if self.cfg_params.has_key('EDG.ce_white_list'):
548 <                msg += 'WARNING: CE White List: '+self.cfg_params['EDG.ce_white_list']+'\n'
549 <                msg += '(Hint: By whitelisting you force the job to run at this particular site(s).\n'
550 <                msg += 'Please check if the dataset is available at this site!)\n'
446 >        jobParams = self.dict['args']
447 >        njobs = self.dict['njobs']
448 >        self.jobDestination = self.dict['jobDestination']
449 >
450 >        if njobs == 0:
451 >            raise CrabException("Asked to split zero jobs: aborting")
452 >        if not self.server and not self.local and njobs > 500:
453 >            raise CrabException("The CRAB client will not submit more than 500 jobs. You must use the server mode.")
454  
455 <            common.logger.message(msg)
455 >        # create the empty structure
456 >        for i in range(njobs):
457 >            jobParams.append("")
458  
459 <        self.list_of_args = list_of_lists
459 >        listID=[]
460 >        listField=[]
461 >        listDictions=[]
462 >        exist= os.path.exists(self.argsFile)
463 >        for id in range(njobs):
464 >            job = id + int(firstJobID)
465 >            listID.append(job+1)
466 >            job_ToSave ={}
467 >            concString = ' '
468 >            argu=''
469 >            str_argu = str(job+1)
470 >            if len(jobParams[id]):
471 >                argu = {'JobID': job+1}
472 >                for i in range(len(jobParams[id])):
473 >                    argu[self.dict['params'][i]]=jobParams[id][i]
474 >                    if len(jobParams[id])==1: self.NumEvents = jobParams[id][i]
475 >                # just for debug
476 >                str_argu += concString.join(jobParams[id])
477 >            if argu != '': listDictions.append(argu)
478 >            job_ToSave['arguments']= '%d %d'%( (job+1), 0)
479 >            job_ToSave['dlsDestination']= self.jobDestination[id]
480 >            listField.append(job_ToSave)
481 >            from ProdCommon.SiteDB.CmsSiteMapper import CmsSEMap
482 >            cms_se = CmsSEMap()
483 >            msg="Job  %s  Arguments:  %s\n"%(str(job+1),str_argu)
484 >            msg+="\t  Destination: %s "%(str(self.jobDestination[id]))
485 >            SEDestination = [cms_se[dest] for dest in self.jobDestination[id]]
486 >            msg+="\t  CMSDestination: %s "%(str(SEDestination))
487 >            common.logger.log(10-1,msg)
488 >        # write xml
489 >        if len(listDictions):
490 >            if exist==False: self.CreateXML()
491 >            self.addEntry(listDictions)
492 >        common._db.updateJob_(listID,listField)
493          return
494  
495 <    def jobSplittingNoInput(self):
495 >    def CreateXML(self):
496          """
559        Perform job splitting based on number of event per job
497          """
498 <        common.logger.debug(5,'Splitting per events')
499 <
563 <        if (self.selectEventsPerJob):
564 <            common.logger.message('Required '+str(self.eventsPerJob)+' events per job ')
565 <        if (self.selectNumberOfJobs):
566 <            common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
567 <        if (self.selectTotalNumberEvents):
568 <            common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
569 <
570 <        if (self.total_number_of_events < 0):
571 <            msg='Cannot split jobs per Events with "-1" as total number of events'
572 <            raise CrabException(msg)
573 <
574 <        if (self.selectEventsPerJob):
575 <            if (self.selectTotalNumberEvents):
576 <                self.total_number_of_jobs = int(self.total_number_of_events/self.eventsPerJob)
577 <            elif(self.selectNumberOfJobs) :
578 <                self.total_number_of_jobs =self.theNumberOfJobs
579 <                self.total_number_of_events =int(self.theNumberOfJobs*self.eventsPerJob)
580 <
581 <        elif (self.selectNumberOfJobs) :
582 <            self.total_number_of_jobs = self.theNumberOfJobs
583 <            self.eventsPerJob = int(self.total_number_of_events/self.total_number_of_jobs)
584 <
585 <        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
586 <
587 <        # is there any remainder?
588 <        check = int(self.total_number_of_events) - (int(self.total_number_of_jobs)*self.eventsPerJob)
589 <
590 <        common.logger.debug(5,'Check  '+str(check))
591 <
592 <        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created, each for '+str(self.eventsPerJob)+' for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
593 <        if check > 0:
594 <            common.logger.message('Warning: asked '+str(self.total_number_of_events)+' but can do only '+str(int(self.total_number_of_jobs)*self.eventsPerJob))
595 <
596 <        # argument is seed number.$i
597 <        self.list_of_args = []
598 <        for i in range(self.total_number_of_jobs):
599 <            ## Since there is no input, any site is good
600 <            self.jobDestination.append([""]) #must be empty to write correctly the xml
601 <            args=[]
602 <            if (self.firstRun):
603 <                ## pythia first run
604 <                args.append(str(self.firstRun)+str(i))
605 <            if (self.sourceSeed):
606 <                args.append(str(self.sourceSeed)+str(i))
607 <                if (self.sourceSeedVtx):
608 <                    ## + vtx random seed
609 <                    args.append(str(self.sourceSeedVtx)+str(i))
610 <                if (self.sourceSeedG4):
611 <                    ## + G4 random seed
612 <                    args.append(str(self.sourceSeedG4)+str(i))
613 <                if (self.sourceSeedMix):
614 <                    ## + Mix random seed
615 <                    args.append(str(self.sourceSeedMix)+str(i))
616 <                pass
617 <            pass
618 <            self.list_of_args.append(args)
619 <        pass
620 <
498 >        result = IMProvNode( self.rootArgsFilename )
499 >        outfile = file( self.argsFile, 'w').write(str(result))
500          return
501  
502 <
624 <    def jobSplittingForScript(self):#CarlosDaniele
502 >    def addEntry(self, listDictions):
503          """
504 <        Perform job splitting based on number of job
627 <        """
628 <        common.logger.debug(5,'Splitting per job')
629 <        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
630 <
631 <        self.total_number_of_jobs = self.theNumberOfJobs
632 <
633 <        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
634 <
635 <        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created')
636 <
637 <        # argument is seed number.$i
638 <        self.list_of_args = []
639 <        for i in range(self.total_number_of_jobs):
640 <            ## Since there is no input, any site is good
641 <           # self.jobDestination.append(["Any"])
642 <            self.jobDestination.append([""])
643 <            ## no random seed
644 <            self.list_of_args.append([str(i)])
645 <        return
504 >        _addEntry_
505  
506 <    def split(self, jobParams):
507 <
508 <        common.jobDB.load()
509 <        #### Fabio
510 <        njobs = self.total_number_of_jobs
511 <        arglist = self.list_of_args
512 <        # create the empty structure
513 <        for i in range(njobs):
514 <            jobParams.append("")
656 <
657 <        for job in range(njobs):
658 <            jobParams[job] = arglist[job]
659 <            # print str(arglist[job])
660 <            # print jobParams[job]
661 <            common.jobDB.setArguments(job, jobParams[job])
662 <            common.logger.debug(5,"Job "+str(job)+" Destination: "+str(self.jobDestination[job]))
663 <            common.jobDB.setDestination(job, self.jobDestination[job])
664 <
665 <        common.jobDB.save()
506 >        add an entry to the xml file
507 >        """
508 >        ## load xml
509 >        improvDoc = loadIMProvFile(self.argsFile)
510 >        entrname= 'Job'
511 >        for dictions in listDictions:
512 >           report = IMProvNode(entrname , None, **dictions)
513 >           improvDoc.addNode(report)
514 >        outfile = file( self.argsFile, 'w').write(str(improvDoc))
515          return
516  
668    def getJobTypeArguments(self, nj, sched):
669        result = ''
670        for i in common.jobDB.arguments(nj):
671            result=result+str(i)+" "
672        return result
673
517      def numberOfJobs(self):
518 <        # Fabio
519 <        return self.total_number_of_jobs
518 > #wmbs
519 >        if self.automation==0:
520 >           return self.dict['njobs']
521 >        else:
522 >           return None
523  
524      def getTarBall(self, exe):
525          """
526          Return the TarBall with lib and exe
527          """
528 <
683 <        # if it exist, just return it
684 <        #
685 <        # Marco. Let's start to use relative path for Boss XML files
686 <        #
687 <        self.tgzNameWithPath = common.work_space.pathForTgz()+'share/'+self.tgz_name
528 >        self.tgzNameWithPath = common.work_space.pathForTgz()+self.tgz_name
529          if os.path.exists(self.tgzNameWithPath):
530              return self.tgzNameWithPath
531  
# Line 697 | Line 538 | class Cmssw(JobType):
538  
539          # First of all declare the user Scram area
540          swArea = self.scram.getSWArea_()
700        #print "swArea = ", swArea
701        # swVersion = self.scram.getSWVersion()
702        # print "swVersion = ", swVersion
541          swReleaseTop = self.scram.getReleaseTop_()
704        #print "swReleaseTop = ", swReleaseTop
542  
543          ## check if working area is release top
544          if swReleaseTop == '' or swArea == swReleaseTop:
545 +            common.logger.debug("swArea = "+swArea+" swReleaseTop ="+swReleaseTop)
546              return
547  
548          import tarfile
# Line 719 | Line 557 | class Cmssw(JobType):
557                  ## then check if it's private or not
558                  if exeWithPath.find(swReleaseTop) == -1:
559                      # the exe is private, so we must ship
560 <                    common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
560 >                    common.logger.debug("Exe "+exeWithPath+" to be tarred")
561                      path = swArea+'/'
562                      # distinguish case when script is in user project area or given by full path somewhere else
563                      if exeWithPath.find(path) >= 0 :
# Line 733 | Line 571 | class Cmssw(JobType):
571                      pass
572  
573              ## Now get the libraries: only those in local working area
574 +            tar.dereference=True
575              libDir = 'lib'
576              lib = swArea+'/' +libDir
577 <            common.logger.debug(5,"lib "+lib+" to be tarred")
577 >            common.logger.debug("lib "+lib+" to be tarred")
578              if os.path.exists(lib):
579                  tar.add(lib,libDir)
580  
# Line 744 | Line 583 | class Cmssw(JobType):
583              module = swArea + '/' + moduleDir
584              if os.path.isdir(module):
585                  tar.add(module,moduleDir)
586 +            tar.dereference=False
587  
588              ## Now check if any data dir(s) is present
589 <            swAreaLen=len(swArea)
590 <            for root, dirs, files in os.walk(swArea):
591 <                if "data" in dirs:
592 <                    common.logger.debug(5,"data "+root+"/data"+" to be tarred")
593 <                    tar.add(root+"/data",root[swAreaLen:]+"/data")
594 <
595 <            ### Removed ProdAgent Api dependencies ###
596 <            ### Add ProdAgent dir to tar
597 <            #paDir = 'ProdAgentApi'
598 <            #pa = os.environ['CRABDIR'] + '/' + 'ProdAgentApi'
599 <            #if os.path.isdir(pa):
600 <            #    tar.add(pa,paDir)
589 >            self.dataExist = False
590 >            todo_list = [(i, i) for i in  os.listdir(swArea+"/src")]
591 >            while len(todo_list):
592 >                entry, name = todo_list.pop()
593 >                if name.startswith('crab_0_') or  name.startswith('.') or name == 'CVS':
594 >                    continue
595 >                if os.path.isdir(swArea+"/src/"+entry):
596 >                    entryPath = entry + '/'
597 >                    todo_list += [(entryPath + i, i) for i in  os.listdir(swArea+"/src/"+entry)]
598 >                    if name == 'data':
599 >                        self.dataExist=True
600 >                        common.logger.debug("data "+entry+" to be tarred")
601 >                        tar.add(swArea+"/src/"+entry,"src/"+entry)
602 >                    pass
603 >                pass
604  
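The hand-rolled work list above scans src/ for data/ directories while skipping crab_0_* work areas, hidden entries and CVS; an os.walk sketch of the same idea (assuming equivalent pruning is acceptable):

    import os

    def find_data_dirs(src):
        # Yield data/ directories under src/, pruning crab work dirs, CVS and hidden entries.
        for root, dirs, files in os.walk(src):
            dirs[:] = [d for d in dirs
                       if not (d.startswith('crab_0_') or d.startswith('.') or d == 'CVS')]
            for d in dirs:
                if d == 'data':
                    yield os.path.join(root, d)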
605 <            ## Add ProdCommon dir to tar
606 <            prodcommonDir = 'ProdCommon'
607 <            prodcommonPath = os.environ['CRABDIR'] + '/' + 'ProdCommon'
608 <            if os.path.isdir(prodcommonPath):
609 <                tar.add(prodcommonPath,prodcommonDir)
605 >            ### CMSSW ParameterSet
606 >            if not self.pset is None:
607 >                cfg_file = common.work_space.jobDir()+self.configFilename()
608 >                pickleFile = common.work_space.jobDir()+self.configFilename() + '.pkl'
609 >                tar.add(cfg_file,self.configFilename())
610 >                tar.add(pickleFile,self.configFilename() + '.pkl')
611  
612 <            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
613 <            tar.close()
614 <        except :
615 <            raise CrabException('Could not create tar-ball')
612 >            try:
613 >                crab_cfg_file = common.work_space.shareDir()+'/crab.cfg'
614 >                tar.add(crab_cfg_file,'crab.cfg')
615 >            except:
616 >                pass
617  
618 <        ## check for tarball size
619 <        tarballinfo = os.stat(self.tgzNameWithPath)
620 <        if ( tarballinfo.st_size > self.MaxTarBallSize*1024*1024 ) :
621 <            raise CrabException('Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) + ' MB input sandbox limit and not supported by the used GRID submission system. Please make sure that no unnecessary files are in all data directories in your local CMSSW project area as they are automatically packed into the input sandbox.')
618 >            ## Add ProdCommon dir to tar
619 >            prodcommonDir = './'
620 >            prodcommonPath = os.environ['CRABDIR'] + '/' + 'external/'
621 >            neededStuff = ['ProdCommon/__init__.py','ProdCommon/FwkJobRep', 'ProdCommon/CMSConfigTools', \
622 >                           'ProdCommon/Core', 'ProdCommon/MCPayloads', 'IMProv', 'ProdCommon/Storage', \
623 >                           'WMCore/__init__.py','WMCore/Algorithms']
624 >            for file in neededStuff:
625 >                tar.add(prodcommonPath+file,prodcommonDir+file)
626  
627 <        ## create tar-ball with ML stuff
628 <        self.MLtgzfile =  common.work_space.pathForTgz()+'share/MLfiles.tgz'
780 <        try:
781 <            tar = tarfile.open(self.MLtgzfile, "w:gz")
627 >            ##### ML stuff
628 >            ML_file_list=['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py']
629              path=os.environ['CRABDIR'] + '/python/'
630 <            for file in ['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py', 'parseCrabFjr.py']:
630 >            for file in ML_file_list:
631 >                tar.add(path+file,file)
632 >
633 >            ##### Utils
634 >            Utils_file_list=['parseCrabFjr.py','writeCfg.py', 'fillCrabFjr.py','cmscp.py']
635 >            for file in Utils_file_list:
636                  tar.add(path+file,file)
637 <            common.logger.debug(5,"Files added to "+self.MLtgzfile+" : "+str(tar.getnames()))
637 >
638 >            ##### AdditionalFiles
639 >            tar.dereference=True
640 >            for file in self.additional_inbox_files:
641 >                tar.add(file,string.split(file,'/')[-1])
642 >            tar.dereference=False
643 >            common.logger.log(10-1,"Files in "+self.tgzNameWithPath+" : "+str(tar.getnames()))
644 >
645              tar.close()
646 <        except :
647 <            raise CrabException('Could not create ML files tar-ball')
646 >        except IOError, exc:
647 >            msg = 'Could not create tar-ball %s \n'%self.tgzNameWithPath
648 >            msg += str(exc)
649 >            raise CrabException(msg)
650 >        except tarfile.TarError, exc:
651 >            msg = 'Could not create tar-ball %s \n'%self.tgzNameWithPath
652 >            msg += str(exc)
653 >            raise CrabException(msg)
654  
655 <        return
655 >        tarballinfo = os.stat(self.tgzNameWithPath)
656 >        if ( tarballinfo.st_size > self.MaxTarBallSize*1024*1024 ) :
657 >            if not self.server:
658 >                msg  = 'Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + \
659 >                         str(self.MaxTarBallSize) +'MB input sandbox limit \n'
660 >                msg += '      and not supported by the direct GRID submission system.\n'
661 >                msg += '      Please use the CRAB server mode by setting server_name=<NAME> in section [CRAB] of your crab.cfg.\n'
662 >                msg += '      For further info please see https://twiki.cern.ch/twiki/bin/view/CMS/SWGuideCrabServerForUsers#Server_available_for_users'
663 >            else:
664 >                msg  = 'Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' +  \
665 >                        str(self.MaxTarBallSize) +'MB input sandbox limit in the server.'
666 >            raise CrabException(msg)
667  
668 <    def additionalInputFileTgz(self):
793 <        """
794 <        Put all additional files into a tar ball and return its name
795 <        """
796 <        import tarfile
797 <        tarName=  common.work_space.pathForTgz()+'share/'+self.additional_tgz_name
798 <        tar = tarfile.open(tarName, "w:gz")
799 <        for file in self.additional_inbox_files:
800 <            tar.add(file,string.split(file,'/')[-1])
801 <        common.logger.debug(5,"Files added to "+self.additional_tgz_name+" : "+str(tar.getnames()))
802 <        tar.close()
803 <        return tarName
668 >        ## create tar-ball with ML stuff
669  
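As a side note, the input-sandbox limit enforced just above can be summarised in a short standalone sketch. The 9.5 MB default and the os.stat() comparison mirror this revision; the function name and the plain RuntimeError (used here instead of CrabException) are illustrative assumptions.

import os

def check_sandbox_size(tgz_path, max_mb=9.5):
    # Compare the tarball size on disk against the allowed limit in MB.
    size_mb = float(os.stat(tgz_path).st_size) / 1024.0 / 1024.0
    if size_mb > max_mb:
        # cms_cmssw raises CrabException here and, for direct GRID submission,
        # suggests switching to CRAB server mode instead.
        raise RuntimeError("Input sandbox size of %.1f MB exceeds the %.1f MB limit" % (size_mb, max_mb))
    return size_mb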
670 <    def wsSetupEnvironment(self, nj):
670 >    def wsSetupEnvironment(self, nj=0):
671          """
672          Returns part of a job script which prepares
673          the execution environment for the job 'nj'.
674          """
675 +        psetName = 'pset.py'
676 +
677          # Prepare JobType-independent part
678 <        txt = ''
678 >        txt = '\n#Written by cms_cmssw::wsSetupEnvironment\n'
679          txt += 'echo ">>> setup environment"\n'
680 <        txt += 'if [ $middleware == LCG ]; then \n'
680 >        txt += 'echo "set SCRAM ARCH to ' + self.executable_arch + '"\n'
681 >        txt += 'export SCRAM_ARCH=' + self.executable_arch + '\n'
682 >        txt += 'echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
683 >        txt += 'if [ $middleware == LCG ] || [ $middleware == CAF ] || [ $middleware == LSF ]; then \n'
684          txt += self.wsSetupCMSLCGEnvironment_()
685          txt += 'elif [ $middleware == OSG ]; then\n'
686          txt += '    WORKING_DIR=`/bin/mktemp  -d $OSG_WN_TMP/cms_XXXXXXXXXXXX`\n'
687          txt += '    if [ ! $? == 0 ] ;then\n'
688 <        txt += '        echo "SET_CMS_ENV 10016 ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
689 <        txt += '        echo "JOB_EXIT_STATUS = 10016"\n'
690 <        txt += '        echo "JobExitCode=10016" | tee -a $RUNTIME_AREA/$repo\n'
821 <        txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
822 <        txt += '        exit 1\n'
688 >        txt += '        echo "ERROR ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
689 >        txt += '        job_exit_code=10016\n'
690 >        txt += '        func_exit\n'
691          txt += '    fi\n'
692          txt += '    echo ">>> Created working directory: $WORKING_DIR"\n'
693          txt += '\n'
# Line 827 | Line 695 | class Cmssw(JobType):
695          txt += '    cd $WORKING_DIR\n'
696          txt += '    echo ">>> current directory (WORKING_DIR): $WORKING_DIR"\n'
697          txt += self.wsSetupCMSOSGEnvironment_()
698 <        #txt += '    echo "### Set SCRAM ARCH to ' + self.executable_arch + ' ###"\n'
699 <        #txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
698 >        #Setup SGE Environment
699 >        txt += 'elif [ $middleware == SGE ]; then\n'
700 >        txt += self.wsSetupCMSLCGEnvironment_()
701 >
702 >        txt += 'elif [ $middleware == ARC ]; then\n'
703 >        txt += self.wsSetupCMSLCGEnvironment_()
704 >
705 >        #Setup PBS Environment
706 >        txt += 'elif [ $middleware == PBS ]; then\n'
707 >        txt += self.wsSetupCMSLCGEnvironment_()
708 >
709          txt += 'fi\n'
710  
711          # Prepare JobType-specific part
# Line 839 | Line 716 | class Cmssw(JobType):
716          txt += scram+' project CMSSW '+self.version+'\n'
717          txt += 'status=$?\n'
718          txt += 'if [ $status != 0 ] ; then\n'
719 <        txt += '    echo "SET_EXE_ENV 10034 ==>ERROR CMSSW '+self.version+' not found on `hostname`" \n'
720 <        txt += '    echo "JOB_EXIT_STATUS = 10034"\n'
721 <        txt += '    echo "JobExitCode=10034" | tee -a $RUNTIME_AREA/$repo\n'
845 <        txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
846 <        txt += '    if [ $middleware == OSG ]; then \n'
847 <        txt += '        cd $RUNTIME_AREA\n'
848 <        txt += '        echo ">>> current directory (RUNTIME_AREA): $RUNTIME_AREA"\n'
849 <        txt += '        echo ">>> Remove working directory: $WORKING_DIR"\n'
850 <        txt += '        /bin/rm -rf $WORKING_DIR\n'
851 <        txt += '        if [ -d $WORKING_DIR ] ;then\n'
852 <        txt += '            echo "SET_CMS_ENV 10018 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after CMSSW CMSSW_0_6_1 not found on `hostname`"\n'
853 <        txt += '            echo "JOB_EXIT_STATUS = 10018"\n'
854 <        txt += '            echo "JobExitCode=10018" | tee -a $RUNTIME_AREA/$repo\n'
855 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
856 <        txt += '        fi\n'
857 <        txt += '    fi \n'
858 <        txt += '    exit 1 \n'
719 >        txt += '    echo "ERROR ==> CMSSW '+self.version+' not found on `hostname`" \n'
720 >        txt += '    job_exit_code=10034\n'
721 >        txt += '    func_exit\n'
722          txt += 'fi \n'
723          txt += 'cd '+self.version+'\n'
724 <        ########## FEDE FOR DBS2 ######################
862 <        txt += 'SOFTWARE_DIR=`pwd`\n'
724 >        txt += 'SOFTWARE_DIR=`pwd`; export SOFTWARE_DIR\n'
725          txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
864        ###############################################
865        ### needed grep for bug in scramv1 ###
726          txt += 'eval `'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME`\n'
727 +        txt += 'if [ $? != 0 ] ; then\n'
728 +        txt += '    echo "ERROR ==> Problem with the command: "\n'
729 +        txt += '    echo "eval \`'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME \` at `hostname`"\n'
730 +        txt += '    job_exit_code=10034\n'
731 +        txt += '    func_exit\n'
732 +        txt += 'fi \n'
733          # Handle the arguments:
734          txt += "\n"
735 <        txt += "## number of arguments (first argument always jobnumber)\n"
735 >        txt += "## number of arguments (first argument always jobnumber, the second is the resubmission number)\n"
736          txt += "\n"
737 <        txt += "if [ $nargs -lt "+str(len(self.argsList[nj].split()))+" ]\n"
737 >        txt += "if [ $nargs -lt "+str(self.argsList)+" ]\n"
738          txt += "then\n"
739 <        txt += "    echo 'SET_EXE_ENV 1 ==> ERROR Too few arguments' +$nargs+ \n"
740 <        txt += '    echo "JOB_EXIT_STATUS = 50113"\n'
741 <        txt += '    echo "JobExitCode=50113" | tee -a $RUNTIME_AREA/$repo\n'
876 <        txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
877 <        txt += '    if [ $middleware == OSG ]; then \n'
878 <        txt += '        cd $RUNTIME_AREA\n'
879 <        txt += '        echo ">>> current directory (RUNTIME_AREA): $RUNTIME_AREA"\n'
880 <        txt += '        echo ">>> Remove working directory: $WORKING_DIR"\n'
881 <        txt += '        /bin/rm -rf $WORKING_DIR\n'
882 <        txt += '        if [ -d $WORKING_DIR ] ;then\n'
883 <        txt += '            echo "SET_EXE_ENV 50114 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Too few arguments for CRAB job wrapper"\n'
884 <        txt += '            echo "JOB_EXIT_STATUS = 50114"\n'
885 <        txt += '            echo "JobExitCode=50114" | tee -a $RUNTIME_AREA/$repo\n'
886 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
887 <        txt += '        fi\n'
888 <        txt += '    fi \n'
889 <        txt += "    exit 1\n"
739 >        txt += "    echo 'ERROR ==> Too few arguments' +$nargs+ \n"
740 >        txt += '    job_exit_code=50113\n'
741 >        txt += "    func_exit\n"
742          txt += "fi\n"
743          txt += "\n"
744  
745          # Prepare job-specific part
746          job = common.job_list[nj]
895        ### FEDE FOR DBS OUTPUT PUBLICATION
747          if (self.datasetPath):
748              txt += '\n'
749              txt += 'DatasetPath='+self.datasetPath+'\n'
750  
751 <            datasetpath_split = self.datasetPath.split("/")
752 <
902 <            txt += 'PrimaryDataset='+datasetpath_split[1]+'\n'
903 <            txt += 'DataTier='+datasetpath_split[2]+'\n'
751 >            txt += 'PrimaryDataset='+self.primaryDataset +'\n'
752 >            txt += 'DataTier='+self.dataTier+'\n'
753              txt += 'ApplicationFamily=cmsRun\n'
754  
755          else:
# Line 908 | Line 757 | class Cmssw(JobType):
757              txt += 'PrimaryDataset=null\n'
758              txt += 'DataTier=null\n'
759              txt += 'ApplicationFamily=MCDataTier\n'
760 <        if self.pset != None: #CarlosDaniele
760 >        if self.pset != None:
761              pset = os.path.basename(job.configFilename())
762 +            pkl  = os.path.basename(job.configFilename()) + '.pkl'
763              txt += '\n'
764              txt += 'cp  $RUNTIME_AREA/'+pset+' .\n'
765 <            if (self.datasetPath): # standard job
916 <                txt += 'InputFiles=${args[1]}\n'
917 <                txt += 'MaxEvents=${args[2]}\n'
918 <                txt += 'SkipEvents=${args[3]}\n'
919 <                txt += 'echo "Inputfiles:<$InputFiles>"\n'
920 <                txt += 'sed "s#\'INPUTFILE\'#$InputFiles#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
921 <                txt += 'echo "MaxEvents:<$MaxEvents>"\n'
922 <                txt += 'sed "s#int32 input = 0#int32 input = $MaxEvents#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
923 <                txt += 'echo "SkipEvents:<$SkipEvents>"\n'
924 <                txt += 'sed "s#uint32 skipEvents = 0#uint32 skipEvents = $SkipEvents#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
925 <            else:  # pythia like job
926 <                seedIndex=1
927 <                if (self.firstRun):
928 <                    txt += 'FirstRun=${args['+str(seedIndex)+']}\n'
929 <                    txt += 'echo "FirstRun: <$FirstRun>"\n'
930 <                    txt += 'sed "s#uint32 firstRun = 0#uint32 firstRun = $FirstRun#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
931 <                    seedIndex=seedIndex+1
932 <
933 <                if (self.sourceSeed):
934 <                    txt += 'Seed=${args['+str(seedIndex)+']}\n'
935 <                    txt += 'sed "s#uint32 sourceSeed = 0#uint32 sourceSeed = $Seed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
936 <                    seedIndex=seedIndex+1
937 <                    ## the following seeds are not always present
938 <                    if (self.sourceSeedVtx):
939 <                        txt += 'VtxSeed=${args['+str(seedIndex)+']}\n'
940 <                        txt += 'echo "VtxSeed: <$VtxSeed>"\n'
941 <                        txt += 'sed "s#uint32 VtxSmeared = 0#uint32 VtxSmeared = $VtxSeed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
942 <                        seedIndex += 1
943 <                    if (self.sourceSeedG4):
944 <                        txt += 'G4Seed=${args['+str(seedIndex)+']}\n'
945 <                        txt += 'echo "G4Seed: <$G4Seed>"\n'
946 <                        txt += 'sed "s#uint32 g4SimHits = 0#uint32 g4SimHits = $G4Seed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
947 <                        seedIndex += 1
948 <                    if (self.sourceSeedMix):
949 <                        txt += 'mixSeed=${args['+str(seedIndex)+']}\n'
950 <                        txt += 'echo "MixSeed: <$mixSeed>"\n'
951 <                        txt += 'sed "s#uint32 mix = 0#uint32 mix = $mixSeed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
952 <                        seedIndex += 1
953 <                    pass
954 <                pass
955 <            txt += 'mv -f '+pset+' pset.cfg\n'
956 <
957 <        if len(self.additional_inbox_files) > 0:
958 <            txt += 'if [ -e $RUNTIME_AREA/'+self.additional_tgz_name+' ] ; then\n'
959 <            txt += '  tar xzvf $RUNTIME_AREA/'+self.additional_tgz_name+'\n'
960 <            txt += 'fi\n'
961 <            pass
765 >            txt += 'cp  $RUNTIME_AREA/'+pkl+' .\n'
766  
767 <        if self.pset != None: #CarlosDaniele
768 <            txt += '\n'
769 <            txt += 'echo "***** cat pset.cfg *********"\n'
770 <            txt += 'cat pset.cfg\n'
771 <            txt += 'echo "****** end pset.cfg ********"\n'
772 <            txt += '\n'
773 <            ### FEDE FOR DBS OUTPUT PUBLICATION
774 <            txt += 'PSETHASH=`EdmConfigHash < pset.cfg` \n'
775 <            txt += 'echo "PSETHASH = $PSETHASH" \n'
776 <            ##############
767 >            txt += 'PreserveSeeds='  + ','.join(self.preserveSeeds)  + '; export PreserveSeeds\n'
768 >            txt += 'IncrementSeeds=' + ','.join(self.incrementSeeds) + '; export IncrementSeeds\n'
769 >            txt += 'echo "PreserveSeeds: <$PreserveSeeds>"\n'
770 >            txt += 'echo "IncrementSeeds:<$IncrementSeeds>"\n'
771 >
772 >            txt += 'mv -f ' + pset + ' ' + psetName + '\n'
773 >            if self.var_filter:
774 >                #print "self.var_filter = ",self.var_filter
775 >                txt += "export var_filter="+"'"+self.var_filter+"'\n"
776 >                txt += 'echo $var_filter'
777 >        else:
778              txt += '\n'
779 +            if self.AdditionalArgs: txt += 'export AdditionalArgs=\"%s\"\n'%(self.AdditionalArgs)
780 +            if int(self.NumEvents) != 0: txt += 'export MaxEvents=%s\n'%str(self.NumEvents)
781          return txt
782  
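All the ws* methods that follow share the pattern visible in wsSetupEnvironment above: each returns a fragment of Bourne-shell text that is later concatenated into the job wrapper script. Below is a minimal sketch of that pattern; the fragment and assembler names are invented for illustration and are not part of CRAB.

def ws_example_fragment(nj=0):
    # Build a shell fragment line by line, as the ws* methods do.
    txt  = '\n# hypothetical fragment for job %d\n' % nj
    txt += 'echo ">>> running job number %d"\n' % nj
    return txt

def assemble_wrapper(fragments):
    # The scheduler code concatenates all fragments into one wrapper script body.
    return ''.join(fragments)

# Usage sketch: open('CMSSW.sh', 'w').write(assemble_wrapper([ws_example_fragment(1)]))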
783 <    def wsBuildExe(self, nj=0):
783 >    def wsUntarSoftware(self, nj=0):
784          """
785          Put in the script the commands to untar the software
786          tar-ball shipped in the input sandbox.
787          """
788  
789 <        txt = ""
789 >        txt = '\n#Written by cms_cmssw::wsUntarSoftware\n'
790  
791          if os.path.isfile(self.tgzNameWithPath):
792 <            txt += 'echo ">>> tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+' :" \n'
793 <            txt += 'tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
792 >            txt += 'echo ">>> tar xzf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+' :" \n'
793 >            if  self.debug_wrapper==1 :
794 >                txt += 'tar zxvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
795 >                txt += 'ls -Al \n'
796 >            else:
797 >                txt += 'tar zxf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
798              txt += 'untar_status=$? \n'
799              txt += 'if [ $untar_status -ne 0 ]; then \n'
800 <            txt += '   echo "SET_EXE 1 ==> ERROR Untarring .tgz file failed"\n'
801 <            txt += '   echo "JOB_EXIT_STATUS = $untar_status" \n'
802 <            txt += '   echo "JobExitCode=$untar_status" | tee -a $RUNTIME_AREA/$repo\n'
992 <            txt += '   if [ $middleware == OSG ]; then \n'
993 <            txt += '       cd $RUNTIME_AREA\n'
994 <            txt += '        echo ">>> current directory (RUNTIME_AREA): $RUNTIME_AREA"\n'
995 <            txt += '        echo ">>> Remove working directory: $WORKING_DIR"\n'
996 <            txt += '       /bin/rm -rf $WORKING_DIR\n'
997 <            txt += '       if [ -d $WORKING_DIR ] ;then\n'
998 <            txt += '           echo "SET_EXE 50999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Untarring .tgz file failed"\n'
999 <            txt += '           echo "JOB_EXIT_STATUS = 50999"\n'
1000 <            txt += '           echo "JobExitCode=50999" | tee -a $RUNTIME_AREA/$repo\n'
1001 <            txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
1002 <            txt += '       fi\n'
1003 <            txt += '   fi \n'
1004 <            txt += '   \n'
1005 <            txt += '   exit 1 \n'
800 >            txt += '   echo "ERROR ==> Untarring .tgz file failed"\n'
801 >            txt += '   job_exit_code=$untar_status\n'
802 >            txt += '   func_exit\n'
803              txt += 'else \n'
804              txt += '   echo "Successful untar" \n'
805              txt += 'fi \n'
806              txt += '\n'
807 <            #### Removed ProdAgent API dependencies
1011 <            txt += 'echo ">>> Include ProdCommon in PYTHONPATH:"\n'
807 >            txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
808              txt += 'if [ -z "$PYTHONPATH" ]; then\n'
809 <            #### FEDE FOR DBS OUTPUT PUBLICATION
1014 <            txt += '   export PYTHONPATH=$SOFTWARE_DIR/ProdCommon\n'
809 >            txt += '   export PYTHONPATH=$RUNTIME_AREA/\n'
810              txt += 'else\n'
811 <            txt += '   export PYTHONPATH=$SOFTWARE_DIR/ProdCommon:${PYTHONPATH}\n'
811 >            txt += '   export PYTHONPATH=$RUNTIME_AREA/:${PYTHONPATH}\n'
812              txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
1018            ###################
813              txt += 'fi\n'
814              txt += '\n'
815  
# Line 1023 | Line 817 | class Cmssw(JobType):
817  
818          return txt
819  
820 <    def modifySteeringCards(self, nj):
820 >    def wsBuildExe(self, nj=0):
821          """
822 <        modify the card provided by the user,
823 <        writing a new card into share dir
822 >        Put in the script the commands to build an executable
823 >        or a library.
824          """
825  
826 +        txt = '\n#Written by cms_cmssw::wsBuildExe\n'
827 +        txt += 'echo ">>> moving CMSSW software directories in `pwd`" \n'
828 +
829 +        txt += 'rm -r lib/ module/ \n'
830 +        txt += 'mv $RUNTIME_AREA/lib/ . \n'
831 +        txt += 'mv $RUNTIME_AREA/module/ . \n'
832 +        if self.dataExist == True:
833 +            txt += 'rm -r src/ \n'
834 +            txt += 'mv $RUNTIME_AREA/src/ . \n'
835 +        if len(self.additional_inbox_files)>0:
836 +            for file in self.additional_inbox_files:
837 +                txt += 'mv $RUNTIME_AREA/'+os.path.basename(file)+' . \n'
838 +
839 +        txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
840 +        txt += 'if [ -z "$PYTHONPATH" ]; then\n'
841 +        txt += '   export PYTHONPATH=$RUNTIME_AREA/\n'
842 +        txt += 'else\n'
843 +        txt += '   export PYTHONPATH=$RUNTIME_AREA/:${PYTHONPATH}\n'
844 +        txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
845 +        txt += 'fi\n'
846 +        txt += '\n'
847 +
848 +        if self.pset != None:
849 +            psetName = 'pset.py'
850 +
851 +            txt += '\n'
852 +            if self.debug_wrapper == 1:
853 +                txt += 'echo "***** cat ' + psetName + ' *********"\n'
854 +                txt += 'cat ' + psetName + '\n'
855 +                txt += 'echo "****** end ' + psetName + ' ********"\n'
856 +                txt += '\n'
857 +                txt += 'echo "***********************" \n'
858 +                txt += 'which edmConfigHash \n'
859 +                txt += 'echo "***********************" \n'
860 +            txt += 'edmConfigHash ' + psetName + ' \n'
861 +            txt += 'PSETHASH=`edmConfigHash ' + psetName + '` \n'
862 +            txt += 'echo "PSETHASH = $PSETHASH" \n'
863 +            #### FEDE temporary fix for noEdm files #####
864 +            txt += 'if [ -z "$PSETHASH" ]; then \n'
865 +            txt += '   export PSETHASH=null\n'
866 +            txt += 'fi \n'
867 +            #############################################
868 +            txt += '\n'
869 +        return txt
870 +
871 +
872      def executableName(self):
873 <        if self.scriptExe: #CarlosDaniele
873 >        if self.scriptExe:
874              return "sh "
875          else:
876              return self.executable
877  
878      def executableArgs(self):
879 <        if self.scriptExe:#CarlosDaniele
880 <            return   self.scriptExe + " $NJob"
879 >        if self.scriptExe:
880 >            return self.scriptExe + " $NJob $AdditionalArgs"
881          else:
882 <            # if >= CMSSW_1_5_X, add -j crab_fjr.xml
1043 <            version_array = self.scram.getSWVersion().split('_')
1044 <            major = 0
1045 <            minor = 0
1046 <            try:
1047 <                major = int(version_array[1])
1048 <                minor = int(version_array[2])
1049 <            except:
1050 <                msg = "Cannot parse CMSSW version string: " + "_".join(version_array) + " for major and minor release number!"
1051 <                raise CrabException(msg)
1052 <            if major >= 1 and minor >= 5 :
1053 <                return " -j " + self.fjrFileName + " -p pset.cfg"
1054 <            else:
1055 <                return " -p pset.cfg"
882 >            return " -j $RUNTIME_AREA/crab_fjr_$NJob.xml -p pset.py"
883  
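The removed branch above selected the -j option by parsing the SCRAM version string, while the new code always writes the framework job report. A minimal sketch of that kind of version parsing, assuming the usual CMSSW_<major>_<minor>_<patch> naming; the helper name is illustrative.

def parse_cmssw_version(version):
    # Split e.g. 'CMSSW_3_8_4' into integer (major, minor, patch).
    parts = version.split('_')
    try:
        return int(parts[1]), int(parts[2]), int(parts[3])
    except (IndexError, ValueError):
        # The old branch treated an unparsable version string as a fatal error.
        raise ValueError("Cannot parse CMSSW version string: " + version)

# parse_cmssw_version('CMSSW_3_8_4')  ->  (3, 8, 4)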
884      def inputSandbox(self, nj):
885          """
886          Returns a list of filenames to be put in JDL input sandbox.
887          """
888          inp_box = []
1062        # # dict added to delete duplicate from input sandbox file list
1063        # seen = {}
1064        ## code
889          if os.path.isfile(self.tgzNameWithPath):
890              inp_box.append(self.tgzNameWithPath)
891 <        if os.path.isfile(self.MLtgzfile):
892 <            inp_box.append(self.MLtgzfile)
893 <        ## config
1070 <        if not self.pset is None:
1071 <            inp_box.append(common.work_space.pathForTgz() + 'job/' + self.configFilename())
1072 <        ## additional input files
1073 <        tgz = self.additionalInputFileTgz()
1074 <        inp_box.append(tgz)
891 >        if os.path.isfile(self.argsFile):
892 >            inp_box.append(self.argsFile)
893 >        inp_box.append(common.work_space.jobDir() + self.scriptName)
894          return inp_box
895  
896      def outputSandbox(self, nj):
# Line 1083 | Line 902 | class Cmssw(JobType):
902          ## User Declared output files
903          for out in (self.output_file+self.output_file_sandbox):
904              n_out = nj + 1
905 <            out_box.append(self.numberFile_(out,str(n_out)))
905 >            out_box.append(numberFile(out,str(n_out)))
906          return out_box
907  
1089    def prepareSteeringCards(self):
1090        """
1091        Make initial modifications of the user's steering card file.
1092        """
1093        return
908  
909      def wsRenameOutput(self, nj):
910          """
911          Returns part of a job script which renames the produced files.
912          """
913  
914 <        txt = '\n'
914 >        txt = '\n#Written by cms_cmssw::wsRenameOutput\n'
915          txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
916          txt += 'echo ">>> current directory content:"\n'
917 <        txt += 'ls \n'
917 >        if self.debug_wrapper==1:
918 >            txt += 'ls -Al\n'
919          txt += '\n'
920  
1106        txt += 'output_exit_status=0\n'
1107
1108        for fileWithSuffix in (self.output_file_sandbox):
1109            output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
1110            txt += '\n'
1111            txt += '# check output file\n'
1112            txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n'
1113            txt += '    mv '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1114            txt += '    ln -s $RUNTIME_AREA/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
1115            txt += 'else\n'
1116            txt += '    exit_status=60302\n'
1117            txt += '    echo "ERROR: Output file '+fileWithSuffix+' not found"\n'
1118            if common.scheduler.name().upper() == 'CONDOR_G':
1119                txt += '    if [ $middleware == OSG ]; then \n'
1120                txt += '        echo "prepare dummy output file"\n'
1121                txt += '        echo "Processing of job output failed" > $RUNTIME_AREA/'+output_file_num+'\n'
1122                txt += '    fi \n'
1123            txt += 'fi\n'
1124
921          for fileWithSuffix in (self.output_file):
922 <            output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
922 >            output_file_num = numberFile(fileWithSuffix, '$OutUniqueID')
923              txt += '\n'
924              txt += '# check output file\n'
925              txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n'
# Line 1134 | Line 930 | class Cmssw(JobType):
930                  txt += '    mv '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
931                  txt += '    ln -s $RUNTIME_AREA/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
932              txt += 'else\n'
933 <            txt += '    exit_status=60302\n'
934 <            txt += '    echo "ERROR: Output file '+fileWithSuffix+' not found"\n'
1139 <            txt += '    echo "JOB_EXIT_STATUS = $exit_status"\n'
1140 <            txt += '    output_exit_status=$exit_status\n'
933 >            txt += '    job_exit_code=60302\n'
934 >            txt += '    echo "WARNING: Output file '+fileWithSuffix+' not found"\n'
935              if common.scheduler.name().upper() == 'CONDOR_G':
936                  txt += '    if [ $middleware == OSG ]; then \n'
937                  txt += '        echo "prepare dummy output file"\n'
# Line 1146 | Line 940 | class Cmssw(JobType):
940              txt += 'fi\n'
941          file_list = []
942          for fileWithSuffix in (self.output_file):
943 <             file_list.append(self.numberFile_(fileWithSuffix, '$NJob'))
943 >             file_list.append(numberFile('$SOFTWARE_DIR/'+fileWithSuffix, '$OutUniqueID'))
944  
945 <        txt += 'file_list="'+string.join(file_list,' ')+'"\n'
945 >        txt += 'file_list="'+string.join(file_list,',')+'"\n'
946          txt += '\n'
947          txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
948          txt += 'echo ">>> current directory content:"\n'
949 <        txt += 'ls \n'
949 >        if self.debug_wrapper==1:
950 >            txt += 'ls -Al\n'
951          txt += '\n'
952          txt += 'cd $RUNTIME_AREA\n'
953          txt += 'echo ">>> current directory (RUNTIME_AREA):  $RUNTIME_AREA"\n'
954          return txt
955  
1161    def numberFile_(self, file, txt):
1162        """
1163        append _'txt' before last extension of a file
1164        """
1165        p = string.split(file,".")
1166        # take away last extension
1167        name = p[0]
1168        for x in p[1:-1]:
1169            name=name+"."+x
1170        # add "_txt"
1171        if len(p)>1:
1172            ext = p[len(p)-1]
1173            result = name + '_' + txt + "." + ext
1174        else:
1175            result = name + '_' + txt
1176
1177        return result
1178
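The helper removed above is now taken from crab_util as numberFile(); it inserts a per-job tag before the last extension of an output file name. A minimal equivalent sketch, with an illustrative function name:

def number_file(name, tag):
    # 'histo.root' + '3' -> 'histo_3.root'; a name without an extension just gets '_3' appended.
    parts = name.split('.')
    if len(parts) > 1:
        return '.'.join(parts[:-1]) + '_' + tag + '.' + parts[-1]
    return name + '_' + tag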
956      def getRequirements(self, nj=[]):
957          """
958          return job requirements to add to jdl files
# Line 1185 | Line 962 | class Cmssw(JobType):
962              req='Member("VO-cms-' + \
963                   self.version + \
964                   '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
965 <        ## SL add requirement for OS version only if SL4
1189 <        #reSL4 = re.compile( r'slc4' )
1190 <        if self.executable_arch: # and reSL4.search(self.executable_arch):
965 >        if self.executable_arch:
966              req+=' && Member("VO-cms-' + \
967                   self.executable_arch + \
968                   '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
969  
970          req = req + ' && (other.GlueHostNetworkAdapterOutboundIP)'
971 +        if ( common.scheduler.name() in ["glite"] ):
972 +            ## 25-Jun-2009 SL: patch to use Cream enabled WMS
973 +            if ( self.cfg_params.get('GRID.use_cream',None) ):
974 +                req += ' && (other.GlueCEStateStatus == "Production" || other.GlueCEStateStatus == "Special")'
975 +            else:
976 +                req += ' && other.GlueCEStateStatus == "Production" '
977  
978          return req
979  
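To make the string assembled by getRequirements above easier to read, here is a sketch that mirrors the same concatenation; the function name and the example values in the trailing comment are made up for illustration, and in getRequirements itself the CE status clause is only appended for the glite scheduler.

def build_requirements(version, arch, use_cream=False):
    # Require the published CMSSW release and architecture tags plus outbound IP.
    req  = 'Member("VO-cms-%s", other.GlueHostApplicationSoftwareRunTimeEnvironment)' % version
    if arch:
        req += ' && Member("VO-cms-%s", other.GlueHostApplicationSoftwareRunTimeEnvironment)' % arch
    req += ' && (other.GlueHostNetworkAdapterOutboundIP)'
    # CE must be in Production state; "Special" is also accepted when CREAM CEs are allowed.
    if use_cream:
        req += ' && (other.GlueCEStateStatus == "Production" || other.GlueCEStateStatus == "Special")'
    else:
        req += ' && other.GlueCEStateStatus == "Production" '
    return req

# e.g. build_requirements('CMSSW_3_8_4', 'slc5_amd64_gcc434')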
980      def configFilename(self):
981          """ return the config filename """
982 <        return self.name()+'.cfg'
982 >        return self.name()+'.py'
983  
984      def wsSetupCMSOSGEnvironment_(self):
985          """
986          Returns part of a job script which prepares
987          the execution environment and which is common for all CMS jobs.
988          """
989 <        txt = '    echo ">>> setup CMS OSG environment:"\n'
989 >        txt = '\n#Written by cms_cmssw::wsSetupCMSOSGEnvironment_\n'
990 >        txt += '    echo ">>> setup CMS OSG environment:"\n'
991          txt += '    echo "set SCRAM ARCH to ' + self.executable_arch + '"\n'
992          txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
993          txt += '    echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
# Line 1213 | Line 995 | class Cmssw(JobType):
995          txt += '      # Use $OSG_APP/cmssoft/cms/cmsset_default.sh to setup cms software\n'
996          txt += '        source $OSG_APP/cmssoft/cms/cmsset_default.sh '+self.version+'\n'
997          txt += '    else\n'
998 <        txt += '        echo "SET_CMS_ENV 10020 ==> ERROR $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
999 <        txt += '        echo "JOB_EXIT_STATUS = 10020"\n'
1000 <        txt += '        echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1219 <        txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
1220 <        txt += '\n'
1221 <        txt += '        cd $RUNTIME_AREA\n'
1222 <        txt += '        echo ">>> current directory (RUNTIME_AREA): $RUNTIME_AREA"\n'
1223 <        txt += '        echo ">>> Remove working directory: $WORKING_DIR"\n'
1224 <        txt += '        /bin/rm -rf $WORKING_DIR\n'
1225 <        txt += '        if [ -d $WORKING_DIR ] ;then\n'
1226 <        txt += '            echo "SET_CMS_ENV 10017 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1227 <        txt += '            echo "JOB_EXIT_STATUS = 10017"\n'
1228 <        txt += '            echo "JobExitCode=10017" | tee -a $RUNTIME_AREA/$repo\n'
1229 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
1230 <        txt += '        fi\n'
1231 <        txt += '\n'
1232 <        txt += '        exit 1\n'
998 >        txt += '        echo "ERROR ==> $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
999 >        txt += '        job_exit_code=10020\n'
1000 >        txt += '        func_exit\n'
1001          txt += '    fi\n'
1002          txt += '\n'
1003 <        txt += '    echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
1003 >        txt += '    echo "==> setup cms environment ok"\n'
1004          txt += '    echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
1005  
1006          return txt
1007  
1240    ### OLI_DANIELE
1008      def wsSetupCMSLCGEnvironment_(self):
1009          """
1010          Returns part of a job script which prepares
1011          the execution environment and which is common for all CMS jobs.
1012          """
1013 <        txt = '    echo ">>> setup CMS LCG environment:"\n'
1013 >        txt = '\n#Written by cms_cmssw::wsSetupCMSLCGEnvironment_\n'
1014 >        txt += '    echo ">>> setup CMS LCG environment:"\n'
1015          txt += '    echo "set SCRAM ARCH and BUILD_ARCH to ' + self.executable_arch + ' ###"\n'
1016          txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
1017          txt += '    export BUILD_ARCH='+self.executable_arch+'\n'
1018          txt += '    if [ ! $VO_CMS_SW_DIR ] ;then\n'
1019 <        txt += '        echo "SET_CMS_ENV 10031 ==> ERROR CMS software dir not found on WN `hostname`"\n'
1020 <        txt += '        echo "JOB_EXIT_STATUS = 10031" \n'
1021 <        txt += '        echo "JobExitCode=10031" | tee -a $RUNTIME_AREA/$repo\n'
1254 <        txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
1255 <        txt += '        exit 1\n'
1019 >        txt += '        echo "ERROR ==> CMS software dir not found on WN `hostname`"\n'
1020 >        txt += '        job_exit_code=10031\n'
1021 >        txt += '        func_exit\n'
1022          txt += '    else\n'
1023          txt += '        echo "Sourcing environment... "\n'
1024          txt += '        if [ ! -s $VO_CMS_SW_DIR/cmsset_default.sh ] ;then\n'
1025 <        txt += '            echo "SET_CMS_ENV 10020 ==> ERROR cmsset_default.sh file not found into dir $VO_CMS_SW_DIR"\n'
1026 <        txt += '            echo "JOB_EXIT_STATUS = 10020"\n'
1027 <        txt += '            echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1262 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
1263 <        txt += '            exit 1\n'
1025 >        txt += '            echo "ERROR ==> cmsset_default.sh file not found into dir $VO_CMS_SW_DIR"\n'
1026 >        txt += '            job_exit_code=10020\n'
1027 >        txt += '            func_exit\n'
1028          txt += '        fi\n'
1029          txt += '        echo "sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1030          txt += '        source $VO_CMS_SW_DIR/cmsset_default.sh\n'
1031          txt += '        result=$?\n'
1032          txt += '        if [ $result -ne 0 ]; then\n'
1033 <        txt += '            echo "SET_CMS_ENV 10032 ==> ERROR problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1034 <        txt += '            echo "JOB_EXIT_STATUS = 10032"\n'
1035 <        txt += '            echo "JobExitCode=10032" | tee -a $RUNTIME_AREA/$repo\n'
1272 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
1273 <        txt += '            exit 1\n'
1033 >        txt += '            echo "ERROR ==> problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1034 >        txt += '            job_exit_code=10032\n'
1035 >        txt += '            func_exit\n'
1036          txt += '        fi\n'
1037          txt += '    fi\n'
1038          txt += '    \n'
1039 <        txt += '    echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
1039 >        txt += '    echo "==> setup cms environment ok"\n'
1040          return txt
1041  
1042 <    ### FEDE FOR DBS OUTPUT PUBLICATION
1281 <    def modifyReport(self, nj):
1042 >    def wsModifyReport(self, nj):
1043          """
1044          insert the part of the script that modifies the FrameworkJob Report
1045          """
1046  
1047          txt = ''
1048 <        try:
1049 <            publish_data = int(self.cfg_params['USER.publish_data'])
1050 <        except KeyError:
1290 <            publish_data = 0
1291 <        if (publish_data == 1):
1048 >        if (self.copy_data == 1):
1049 >            txt = '\n#Written by cms_cmssw::wsModifyReport\n'
1050 >
1051              txt += 'echo ">>> Modify Job Report:" \n'
1052 <            ################ FEDE FOR DBS2 #############################################
1053 <            #txt += 'chmod a+x $SOFTWARE_DIR/ProdAgentApi/FwkJobRep/ModifyJobReport.py\n'
1295 <            txt += 'chmod a+x $SOFTWARE_DIR/ProdCommon/ProdCommon/FwkJobRep/ModifyJobReport.py\n'
1296 <            #############################################################################
1052 >            txt += 'chmod a+x $RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py\n'
1053 >            txt += 'echo "CMSSW_VERSION = $CMSSW_VERSION"\n\n'
1054  
1055 <            txt += 'if [ -z "$SE" ]; then\n'
1299 <            txt += '    SE="" \n'
1300 <            txt += 'fi \n'
1301 <            txt += 'if [ -z "$SE_PATH" ]; then\n'
1302 <            txt += '    SE_PATH="" \n'
1303 <            txt += 'fi \n'
1304 <            txt += 'echo "SE = $SE"\n'
1305 <            txt += 'echo "SE_PATH = $SE_PATH"\n'
1055 >            args = 'fjr $RUNTIME_AREA/crab_fjr_$NJob.xml json $RUNTIME_AREA/resultCopyFile n_job $OutUniqueID PrimaryDataset $PrimaryDataset  ApplicationFamily $ApplicationFamily ApplicationName $executable cmssw_version $CMSSW_VERSION psethash $PSETHASH'
1056  
1057 <            processedDataset = self.cfg_params['USER.publish_data_name']
1058 <            txt += 'ProcessedDataset='+processedDataset+'\n'
1059 <            #### LFN=/store/user/<user>/processedDataset_PSETHASH
1060 <            txt += 'if [ "$SE_PATH" == "" ]; then\n'
1311 <            #### FEDE: added slash in LFN ##############
1312 <            txt += '    FOR_LFN=/copy_problems/ \n'
1313 <            txt += 'else \n'
1314 <            txt += '    tmp=`echo $SE_PATH | awk -F \'store\' \'{print$2}\'` \n'
1315 <            #####  FEDE TO BE CHANGED, BECAUSE STORE IS HARDCODED!!!! ########
1316 <            txt += '    FOR_LFN=/store$tmp \n'
1317 <            txt += 'fi \n'
1318 <            txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1319 <            txt += 'echo "FOR_LFN = $FOR_LFN" \n'
1320 <            txt += 'echo "CMSSW_VERSION = $CMSSW_VERSION"\n\n'
1321 <            txt += 'echo "$SOFTWARE_DIR/ProdCommon/ProdCommon/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
1322 <            txt += '$SOFTWARE_DIR/ProdCommon/ProdCommon/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
1057 >            if (self.publish_data == 1):
1058 >                txt += 'ProcessedDataset='+self.processedDataset+'\n'
1059 >                txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1060 >                args += ' UserProcessedDataset $USER-$ProcessedDataset-$PSETHASH'
1061  
1062 +            txt += 'echo "$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py '+str(args)+'"\n'
1063 +            txt += '$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py '+str(args)+'\n'
1064              txt += 'modifyReport_result=$?\n'
1325            txt += 'echo modifyReport_result = $modifyReport_result\n'
1065              txt += 'if [ $modifyReport_result -ne 0 ]; then\n'
1066 <            txt += '    exit_status=1\n'
1067 <            txt += '    echo "ERROR: Problem with ModifyJobReport"\n'
1066 >            txt += '    modifyReport_result=70500\n'
1067 >            txt += '    job_exit_code=$modifyReport_result\n'
1068 >            txt += '    echo "ModifyReportResult=$modifyReport_result" | tee -a $RUNTIME_AREA/$repo\n'
1069 >            txt += '    echo "WARNING: Problem with ModifyJobReport"\n'
1070              txt += 'else\n'
1071 <            txt += '    mv NewFrameworkJobReport.xml crab_fjr_$NJob.xml\n'
1071 >            txt += '    mv NewFrameworkJobReport.xml $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
1072              txt += 'fi\n'
1332        else:
1333            txt += 'echo "no data publication required"\n'
1073          return txt
1074  
1075 <    def cleanEnv(self):
1076 <        txt = ''
1077 <        txt += 'if [ $middleware == OSG ]; then\n'
1078 <        txt += '    cd $RUNTIME_AREA\n'
1079 <        txt += '    echo ">>> current directory (RUNTIME_AREA): $RUNTIME_AREA"\n'
1080 <        txt += '    echo ">>> Remove working directory: $WORKING_DIR"\n'
1081 <        txt += '    /bin/rm -rf $WORKING_DIR\n'
1082 <        txt += '    if [ -d $WORKING_DIR ] ;then\n'
1083 <        txt += '        echo "SET_EXE 60999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after cleanup of WN"\n'
1084 <        txt += '        echo "JOB_EXIT_STATUS = 60999"\n'
1085 <        txt += '        echo "JobExitCode=60999" | tee -a $RUNTIME_AREA/$repo\n'
1086 <        txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
1075 >    def wsParseFJR(self):
1076 >        """
1077 >        Parse the FrameworkJobReport to obtain useful info
1078 >        """
1079 >        txt = '\n#Written by cms_cmssw::wsParseFJR\n'
1080 >        txt += 'echo ">>> Parse FrameworkJobReport crab_fjr.xml"\n'
1081 >        txt += 'if [ -s $RUNTIME_AREA/crab_fjr_$NJob.xml ]; then\n'
1082 >        txt += '    if [ -s $RUNTIME_AREA/parseCrabFjr.py ]; then\n'
1083 >        txt += '        cmd_out=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --dashboard $MonitorID,$MonitorJobID '+self.debugWrap+'`\n'
1084 >        if self.debug_wrapper==1 :
1085 >            txt += '        echo "Result of parsing the FrameworkJobReport crab_fjr.xml: $cmd_out"\n'
1086 >        txt += '        executable_exit_status=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --exitcode`\n'
1087 >        txt += '        if [ $executable_exit_status -eq 50115 ];then\n'
1088 >        txt += '            echo ">>> crab_fjr.xml contents: "\n'
1089 >        txt += '            cat $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
1090 >        txt += '            echo "Wrong FrameworkJobReport --> does not contain useful info. ExitStatus: $executable_exit_status"\n'
1091 >        txt += '        elif [ $executable_exit_status -eq -999 ];then\n'
1092 >        txt += '            echo "ExitStatus from FrameworkJobReport not available. Using exit code of executable from command line."\n'
1093 >        txt += '        else\n'
1094 >        txt += '            echo "Extracted ExitStatus from FrameworkJobReport parsing output: $executable_exit_status"\n'
1095 >        txt += '        fi\n'
1096 >        txt += '    else\n'
1097 >        txt += '        echo "CRAB python script to parse CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1098          txt += '    fi\n'
1099 +        #### Patch to check input data reading for CMSSW16x; hopefully we'll remove it asap
1100 +        txt += '    if [ $executable_exit_status -eq 0 ];then\n'
1101 +        txt += '        echo ">>> Executable succeeded  $executable_exit_status"\n'
1102 +        txt += '    fi\n'
1103 +        txt += 'else\n'
1104 +        txt += '    echo "CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1105          txt += 'fi\n'
1106          txt += '\n'
1107 +        txt += 'if [ $executable_exit_status -ne 0 ];then\n'
1108 +        txt += '    echo ">>> Executable failed  $executable_exit_status"\n'
1109 +        txt += '    echo "ExeExitCode=$executable_exit_status" | tee -a $RUNTIME_AREA/$repo\n'
1110 +        txt += '    echo "EXECUTABLE_EXIT_STATUS = $executable_exit_status"\n'
1111 +        txt += '    job_exit_code=$executable_exit_status\n'
1112 +        txt += '    func_exit\n'
1113 +        txt += 'fi\n\n'
1114 +        txt += 'echo "ExeExitCode=$executable_exit_status" | tee -a $RUNTIME_AREA/$repo\n'
1115 +        txt += 'echo "EXECUTABLE_EXIT_STATUS = $executable_exit_status"\n'
1116 +        txt += 'job_exit_code=$executable_exit_status\n'
1117 +
1118          return txt
1119  
1120      def setParam_(self, param, value):
# Line 1356 | Line 1123 | class Cmssw(JobType):
1123      def getParams(self):
1124          return self._params
1125  
1126 <    def uniquelist(self, old):
1360 <        """
1361 <        remove duplicates from a list
1362 <        """
1363 <        nd={}
1364 <        for e in old:
1365 <            nd[e]=0
1366 <        return nd.keys()
1367 <
1368 <
1369 <    def checkOut(self, limit):
1126 >    def outList(self,list=False):
1127          """
1128          check the dimension of the output files
1129          """
1130 <        txt = 'echo ">>> Starting output sandbox limit check :"\n'
1130 >        txt = ''
1131 >        txt += 'echo ">>> list of expected files on output sandbox"\n'
1132          listOutFiles = []
1133 <        txt += 'stdoutFile=`ls *stdout` \n'
1134 <        txt += 'stderrFile=`ls *stderr` \n'
1133 >        stdout = 'CMSSW_$NJob.stdout'
1134 >        stderr = 'CMSSW_$NJob.stderr'
1135 >        if len(self.output_file) <= 0:
1136 >            msg ="WARNING: no output file names have been defined!!\n"
1137 >            msg+="\tno output files will be reported back/staged\n"
1138 >            common.logger.info(msg)
1139 >
1140          if (self.return_data == 1):
1141 <            for file in (self.output_file+self.output_file_sandbox):
1142 <                listOutFiles.append(self.numberFile_(file, '$NJob'))
1143 <            listOutFiles.append('$stdoutFile')
1144 <            listOutFiles.append('$stderrFile')
1145 <        else:
1146 <            for file in (self.output_file_sandbox):
1147 <                listOutFiles.append(self.numberFile_(file, '$NJob'))
1148 <            listOutFiles.append('$stdoutFile')
1386 <            listOutFiles.append('$stderrFile')
1387 <  
1388 <        txt += 'echo "OUTPUT files: '+string.join(listOutFiles,' ')+'"\n'
1141 >            for file in (self.output_file):
1142 >                listOutFiles.append(numberFile(file, '$OutUniqueID'))
1143 >        for file in (self.output_file_sandbox):
1144 >            listOutFiles.append(numberFile(file, '$NJob'))
1145 >        listOutFiles.append(stdout)
1146 >        listOutFiles.append(stderr)
1147 >
1148 >        txt += 'echo "output files: '+string.join(listOutFiles,' ')+'"\n'
1149          txt += 'filesToCheck="'+string.join(listOutFiles,' ')+'"\n'
1150 <       # txt += 'echo "OUTPUT files: '+str(allOutFiles)+'";\n'
1151 <        txt += 'ls -gGhrta;\n'
1152 <        txt += 'sum=0;\n'
1153 <        txt += 'for file in $filesToCheck ; do\n'
1394 <        txt += '    if [ -e $file ]; then\n'
1395 <        txt += '        tt=`ls -gGrta $file | awk \'{ print $3 }\'`\n'
1396 <        txt += '        sum=`expr $sum + $tt`\n'
1397 <        txt += '    else\n'
1398 <        txt += '        echo "WARNING: output file $file not found!"\n'
1399 <        txt += '    fi\n'
1400 <        txt += 'done\n'
1401 <        txt += 'echo "Total Output dimension: $sum";\n'
1402 <        txt += 'limit='+str(limit)+';\n'
1403 <        txt += 'echo "OUTPUT FILES LIMIT SET TO: $limit";\n'
1404 <        txt += 'if [ $limit -lt $sum ]; then\n'
1405 <        txt += '    echo "WARNING: output files have to big size - something will be lost;"\n'
1406 <        txt += '    echo "         checking the output file sizes..."\n'
1407 <        txt += '    tot=0;\n'
1408 <        txt += '    for filefile in $filesToCheck ; do\n'
1409 <        txt += '        dimFile=`ls -gGrta $filefile | awk \'{ print $3 }\';`\n'
1410 <        txt += '        tot=`expr $tot + $tt`;\n'
1411 <        txt += '        if [ $limit -lt $dimFile ]; then\n'
1412 <        txt += '            echo "deleting file: $filefile";\n'
1413 <        txt += '            rm -f $filefile\n'
1414 <        txt += '        elif [ $limit -lt $tot ]; then\n'
1415 <        txt += '            echo "deleting file: $filefile";\n'
1416 <        txt += '            rm -f $filefile\n'
1417 <        txt += '        else\n'
1418 <        txt += '            echo "saving file: $filefile"\n'
1419 <        txt += '        fi\n'
1420 <        txt += '    done\n'
1150 >        txt += 'export filesToCheck\n'
1151 >        taskinfo={}
1152 >        taskinfo['outfileBasename'] = self.output_file
1153 >        common._db.updateTask_(taskinfo)
1154  
1155 <        txt += '    ls -agGhrt;\n'
1423 <        txt += '    echo "WARNING: output files are too big in dimension: can not put in the output_sandbox.";\n'
1424 <        txt += '    echo "JOB_EXIT_STATUS = 70000";\n'
1425 <        txt += '    exit_status=70000;\n'
1426 <        txt += 'else'
1427 <        txt += '    echo "Total Output dimension $sum is fine.";\n'
1428 <        txt += 'fi\n'
1429 <        txt += 'echo "Ending output sandbox limit check"\n'
1155 >        if list : return self.output_file
1156          return txt
1157 +
1158 +    def checkCMSSWVersion(self, url = "https://cmstags.cern.ch/cgi-bin/CmsTC/", fileName = "ReleasesXML"):
1159 +        """
1160 +        compare current CMSSW release and arch with allowed releases
1161 +        """
1162 +
1163 +        downloader = Downloader(url)
1164 +        goodRelease = False
1165 +
1166 +        try:
1167 +            result = downloader.config(fileName)
1168 +        except:
1169 +            common.logger.info("ERROR: Problem reading file of allowed CMSSW releases.")
1170 +
1171 +        try:
1172 +            events = pulldom.parseString(result)
1173 +
1174 +            arch     = None
1175 +            release  = None
1176 +            relType  = None
1177 +            relState = None
1178 +            for (event, node) in events:
1179 +                if event == pulldom.START_ELEMENT:
1180 +                    if node.tagName == 'architecture':
1181 +                        arch = node.attributes.getNamedItem('name').nodeValue
1182 +                    if node.tagName == 'project':
1183 +                        relType = node.attributes.getNamedItem('type').nodeValue
1184 +                        relState = node.attributes.getNamedItem('state').nodeValue
1185 +                        if relType == 'Production' and relState == 'Announced':
1186 +                            release = node.attributes.getNamedItem('label').nodeValue
1187 +                if self.executable_arch == arch and self.version == release:
1188 +                    goodRelease = True
1189 +                    return goodRelease
1190 +
1191 +            if not goodRelease:
1192 +                msg = "WARNING: %s on %s is not a supported release. " % \
1193 +                        (self.version, self.executable_arch)
1194 +                msg += "Submission may fail."
1195 +                common.logger.info(msg)
1196 +        except:
1197 +            common.logger.info("Problems parsing file of allowed CMSSW releases.")
1198 +
1199 +        return goodRelease
1200 +
