root/cvsroot/COMP/CRAB/python/cms_cmssw.py

Comparing COMP/CRAB/python/cms_cmssw.py (file contents):
Revision 1.116.2.8 by fanzago, Wed Oct 17 13:25:00 2007 UTC vs.
Revision 1.365 by spiga, Tue Nov 9 21:10:07 2010 UTC

# Line 1 | Line 1
1 +
2 + __revision__ = "$Id$"
3 + __version__ = "$Revision$"
4 +
5   from JobType import JobType
2 from crab_logger import Logger
6   from crab_exceptions import *
7   from crab_util import *
5 from BlackWhiteListParser import BlackWhiteListParser
8   import common
9 + import re
10   import Scram
11 + from Splitter import JobSplitter
12 + from Downloader import Downloader
13 + try:
14 +    import json
15 + except:
16 +    import simplejson as json
17  
18 + from IMProv.IMProvNode import IMProvNode
19 + from IMProv.IMProvLoader import loadIMProvFile
20   import os, string, glob
21 + from xml.dom import pulldom
22  
23   class Cmssw(JobType):
24 <    def __init__(self, cfg_params, ncjobs):
24 >    def __init__(self, cfg_params, ncjobs,skip_blocks, isNew):
25          JobType.__init__(self, 'CMSSW')
26 <        common.logger.debug(3,'CMSSW::__init__')
27 <
26 >        common.logger.debug('CMSSW::__init__')
27 >        self.skip_blocks = skip_blocks
28 >        self.argsList = 2
29 >        self.NumEvents=0
30          self._params = {}
31          self.cfg_params = cfg_params
32 +        ### FEDE FOR MULTI ###
33 +        self.var_filter=''
34  
35 <        # init BlackWhiteListParser
36 <        self.blackWhiteListParser = BlackWhiteListParser(cfg_params)
37 <
38 <        try:
39 <            self.MaxTarBallSize = float(self.cfg_params['EDG.maxtarballsize'])
40 <        except KeyError:
41 <            self.MaxTarBallSize = 9.5
35 >        ### Temporary patch to automatically skip the ISB size check:
36 >        self.server = self.cfg_params.get('CRAB.server_name',None) or \
37 >                      self.cfg_params.get('CRAB.use_server',0)
38 >        self.local  = common.scheduler.name().upper() in ['LSF','CAF','CONDOR','SGE','PBS']
39 >        size = 9.5
40 >        if self.server :
41 >            size = 1000
42 >        elif self.local:
43 >            size = 9999999
44 >        self.MaxTarBallSize = float(self.cfg_params.get('GRID.maxtarballsize',size))
45  
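As a side note on the block above: the effective sandbox-size limit is resolved in a fixed order. The sketch below is purely illustrative (a hypothetical helper, with a plain dict standing in for cfg_params and a scheduler-name string standing in for common.scheduler) and mirrors that precedence:

    def max_tarball_size(cfg_params, scheduler_name):
        # Server mode wins, then a local scheduler, then the GRID.maxtarballsize
        # override; the hard-coded 9.5 default applies only to plain grid submission.
        server = cfg_params.get('CRAB.server_name', None) or cfg_params.get('CRAB.use_server', 0)
        local = scheduler_name.upper() in ['LSF', 'CAF', 'CONDOR', 'SGE', 'PBS']
        size = 9.5
        if server:
            size = 1000
        elif local:
            size = 9999999
        return float(cfg_params.get('GRID.maxtarballsize', size))

    print max_tarball_size({}, 'glite')                            # 9.5
    print max_tarball_size({'CRAB.use_server': 1}, 'glite')        # 1000.0
    print max_tarball_size({'GRID.maxtarballsize': 20}, 'glite')   # 20.0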
46          # number of jobs requested to be created, limit obj splitting
47          self.ncjobs = ncjobs
48  
30        log = common.logger
31        
49          self.scram = Scram.Scram(cfg_params)
50          self.additional_inbox_files = []
51          self.scriptExe = ''
52          self.executable = ''
53          self.executable_arch = self.scram.getArch()
54          self.tgz_name = 'default.tgz'
38        self.additional_tgz_name = 'additional.tgz'
55          self.scriptName = 'CMSSW.sh'
56 <        self.pset = ''      #scrip use case Da  
57 <        self.datasetPath = '' #scrip use case Da
56 >        self.pset = ''
57 >        self.datasetPath = ''
58  
59 +        self.tgzNameWithPath = common.work_space.pathForTgz()+self.tgz_name
60          # set FJR file name
61          self.fjrFileName = 'crab_fjr.xml'
62  
63          self.version = self.scram.getSWVersion()
64 <        
65 <        #
66 <        # Try to block creation in case of arch/version mismatch
67 <        #
68 <
69 <        a = string.split(self.version, "_")
70 <
71 <        if int(a[1]) == 1 and (int(a[2]) < 5 and self.executable_arch.find('slc4') == 0):
72 <            msg = "Error: CMS does not support %s with %s architecture"%(self.version, self.executable_arch)
64 >        common.logger.log(10-1,"CMSSW version is: "+str(self.version))
65 >        version_array = self.version.split('_')
66 >        self.CMSSW_major = 0
67 >        self.CMSSW_minor = 0
68 >        self.CMSSW_patch = 0
69 >        try:
70 >            self.CMSSW_major = int(version_array[1])
71 >            self.CMSSW_minor = int(version_array[2])
72 >            self.CMSSW_patch = int(version_array[3])
73 >        except:
74 >            msg = "Cannot parse CMSSW version string: " + self.version + " for major and minor release number!"
75              raise CrabException(msg)
57        if int(a[1]) == 1 and (int(a[2]) >= 5 and self.executable_arch.find('slc3') == 0):
58            msg = "Error: CMS does not support %s with %s architecture"%(self.version, self.executable_arch)
59            raise CrabException(msg)
60        
61        common.taskDB.setDict('codeVersion',self.version)
62        self.setParam_('application', self.version)
76  
77 +        if self.CMSSW_major < 2 or (self.CMSSW_major == 2 and self.CMSSW_minor < 1):
78 +            msg = "CRAB supports CMSSW >= 2_1_x only. Use an older CRAB version."
79 +            raise CrabException(msg)
80 +            """
81 +            As CMSSW versions are dropped we can drop more code:
82 +            2.x dropped: drop check for lumi range setting
83 +            """
84 +        self.checkCMSSWVersion()
85          ### collect Data cards
86  
87 <        ## get DBS mode
88 <        try:
68 <            self.use_dbs_1 = int(self.cfg_params['CMSSW.use_dbs_1'])
69 <        except KeyError:
70 <            self.use_dbs_1 = 0
71 <            
72 <        try:
73 <            tmp =  cfg_params['CMSSW.datasetpath']
74 <            log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
75 <            if string.lower(tmp)=='none':
76 <                self.datasetPath = None
77 <                self.selectNoInput = 1
78 <            else:
79 <                self.datasetPath = tmp
80 <                self.selectNoInput = 0
81 <        except KeyError:
82 <            msg = "Error: datasetpath not defined "  
83 <            raise CrabException(msg)
87 >        ### Temporary: added to remove input file control in the case of PU
88 >        self.dataset_pu = cfg_params.get('CMSSW.dataset_pu', None)
89  
90 <        # ML monitoring
91 <        # split dataset path style: /PreProdR3Minbias/SIM/GEN-SIM
92 <        if not self.datasetPath:
93 <            self.setParam_('dataset', 'None')
94 <            self.setParam_('owner', 'None')
90 >        tmp =  cfg_params['CMSSW.datasetpath']
91 >        common.logger.log(10-1, "CMSSW::CMSSW(): datasetPath = "+tmp)
92 >
93 >        if tmp =='':
94 >            msg = "Error: datasetpath not defined "
95 >            raise CrabException(msg)
96 >        elif string.lower(tmp)=='none':
97 >            self.datasetPath = None
98 >            self.selectNoInput = 1
99 >            self.primaryDataset = 'null'
100          else:
101 <            try:
102 <                datasetpath_split = self.datasetPath.split("/")
103 <                # standard style
104 <                self.setParam_('datasetFull', self.datasetPath)
105 <                if self.use_dbs_1 == 1 :
106 <                    self.setParam_('dataset', datasetpath_split[1])
107 <                    self.setParam_('owner', datasetpath_split[-1])
108 <                else:
109 <                    self.setParam_('dataset', datasetpath_split[1])
100 <                    self.setParam_('owner', datasetpath_split[2])
101 <            except:
102 <                self.setParam_('dataset', self.datasetPath)
103 <                self.setParam_('owner', self.datasetPath)
104 <                
105 <        self.setTaskid_()
106 <        self.setParam_('taskId', self.cfg_params['taskId'])
101 >            self.datasetPath = tmp
102 >            self.selectNoInput = 0
103 >            ll = len(self.datasetPath.split("/"))
104 >            if (ll < 4):
105 >                msg = 'Your datasetpath has an invalid format ' + self.datasetPath + '\n'
106 >                msg += 'Expected a path in format /PRIMARY/PROCESSED/TIER1-TIER2 or /PRIMARY/PROCESSED/TIER/METHOD for ADS'
107 >                raise CrabException(msg)
108 >            self.primaryDataset = self.datasetPath.split("/")[1]
109 >            self.dataTier = self.datasetPath.split("/")[2]
110  
111 <        self.dataTiers = []
111 >        # Analysis dataset is primary/processed/tier/definition
112 >        self.ads = False
113 >        if self.datasetPath:
114 >            self.ads = len(self.datasetPath.split("/")) > 4
115 >        self.lumiMask = self.cfg_params.get('CMSSW.lumi_mask',None)
116 >        self.lumiParams = self.cfg_params.get('CMSSW.total_number_of_lumis',None) or \
117 >                          self.cfg_params.get('CMSSW.lumis_per_job',None)
118 >
119 >        # FUTURE: Can remove this check
120 >        if self.ads and self.CMSSW_major < 3:
121 >            common.logger.info('Warning: Analysis dataset support is incomplete in CMSSW 2_x.')
122 >            common.logger.info('  Only file level, not lumi level, granularity is supported.')
123 >
124 >        self.debugWrap=''
125 >        self.debug_wrapper = int(cfg_params.get('USER.debug_wrapper',0))
126 >        if self.debug_wrapper == 1: self.debugWrap='--debug'
127  
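To make the path checks above concrete, here is a small standalone sketch (a hypothetical helper, not part of the patch) of how the number of '/'-separated segments distinguishes an invalid path, a regular dataset, and an analysis dataset (ADS):

    def classify_dataset_path(path):
        parts = path.split('/')          # a leading '/' yields an empty first element
        if len(parts) < 4:
            raise ValueError('invalid datasetpath: %s' % path)
        primary = parts[1]
        tier = parts[2]
        is_ads = len(parts) > 4          # /PRIMARY/PROCESSED/TIER/DEFINITION
        return primary, tier, is_ads

    print classify_dataset_path('/PreProdR3Minbias/SIM/GEN-SIM')      # ('PreProdR3Minbias', 'SIM', False)
    print classify_dataset_path('/Primary/Processed/TIER/MyADSName')  # ('Primary', 'Processed', True)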
128          ## now the application
129 <        try:
130 <            self.executable = cfg_params['CMSSW.executable']
131 <            self.setParam_('exe', self.executable)
132 <            log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
115 <            msg = "Default executable cmsRun overridden. Switch to " + self.executable
116 <            log.debug(3,msg)
117 <        except KeyError:
118 <            self.executable = 'cmsRun'
119 <            self.setParam_('exe', self.executable)
120 <            msg = "User executable not defined. Use cmsRun"
121 <            log.debug(3,msg)
122 <            pass
129 >        self.managedGenerators = ['madgraph', 'comphep', 'lhe']
130 >        self.generator = cfg_params.get('CMSSW.generator','pythia').lower()
131 >        self.executable = cfg_params.get('CMSSW.executable','cmsRun')
132 >        common.logger.log(10-1, "CMSSW::CMSSW(): executable = "+self.executable)
133  
134 <        try:
125 <            self.pset = cfg_params['CMSSW.pset']
126 <            log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
127 <            if self.pset.lower() != 'none' :
128 <                if (not os.path.exists(self.pset)):
129 <                    raise CrabException("User defined PSet file "+self.pset+" does not exist")
130 <            else:
131 <                self.pset = None
132 <        except KeyError:
134 >        if not cfg_params.has_key('CMSSW.pset'):
135              raise CrabException("PSet file missing. Cannot run cmsRun ")
136 +        self.pset = cfg_params['CMSSW.pset']
137 +        common.logger.log(10-1, "Cmssw::Cmssw(): PSet file = "+self.pset)
138 +        if self.pset.lower() != 'none' :
139 +            if (not os.path.exists(self.pset)):
140 +                raise CrabException("User defined PSet file "+self.pset+" does not exist")
141 +        else:
142 +            self.pset = None
143  
144          # output files
145          ## stuff which must be returned always via sandbox
# Line 140 | Line 149 | class Cmssw(JobType):
149          self.output_file_sandbox.append(self.fjrFileName)
150  
151          # other output files to be returned via sandbox or copied to SE
152 <        try:
153 <            self.output_file = []
154 <            tmp = cfg_params['CMSSW.output_file']
155 <            if tmp != '':
156 <                tmpOutFiles = string.split(cfg_params['CMSSW.output_file'],',')
157 <                log.debug(7, 'cmssw::cmssw(): output files '+str(tmpOutFiles))
158 <                for tmp in tmpOutFiles:
159 <                    tmp=string.strip(tmp)
160 <                    self.output_file.append(tmp)
161 <                    pass
162 <            else:
163 <                log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
164 <                pass
156 <            pass
157 <        except KeyError:
158 <            log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
159 <            pass
152 >        outfileflag = False
153 >        self.output_file = []
154 >        tmp = cfg_params.get('CMSSW.output_file',None)
155 >        if tmp :
156 >            self.output_file = [x.strip() for x in tmp.split(',')]
157 >            outfileflag = True #output found
158 >
159 >        self.scriptExe = cfg_params.get('USER.script_exe',None)
160 >        if self.scriptExe :
161 >            if not os.path.isfile(self.scriptExe):
162 >                msg ="ERROR. file "+self.scriptExe+" not found"
163 >                raise CrabException(msg)
164 >            self.additional_inbox_files.append(string.strip(self.scriptExe))
165  
166 <        # script_exe file as additional file in inputSandbox
167 <        try:
163 <            self.scriptExe = cfg_params['USER.script_exe']
164 <            if self.scriptExe != '':
165 <               if not os.path.isfile(self.scriptExe):
166 <                  msg ="ERROR. file "+self.scriptExe+" not found"
167 <                  raise CrabException(msg)
168 <               self.additional_inbox_files.append(string.strip(self.scriptExe))
169 <        except KeyError:
170 <            self.scriptExe = ''
166 >        self.AdditionalArgs = cfg_params.get('USER.script_arguments',None)
167 >        if self.AdditionalArgs : self.AdditionalArgs = string.replace(self.AdditionalArgs,',',' ')
168  
172        #CarlosDaniele
169          if self.datasetPath == None and self.pset == None and self.scriptExe == '' :
170 <           msg ="Error. script_exe  not defined"
171 <           raise CrabException(msg)
170 >            msg ="Error. script_exe  not defined"
171 >            raise CrabException(msg)
172 >
173 >        # use parent files...
174 >        self.useParent = int(self.cfg_params.get('CMSSW.use_parent',0))
175  
176          ## additional input files
177 <        try:
177 >        if cfg_params.has_key('USER.additional_input_files'):
178              tmpAddFiles = string.split(cfg_params['USER.additional_input_files'],',')
179              for tmp in tmpAddFiles:
180                  tmp = string.strip(tmp)
# Line 192 | Line 191 | class Cmssw(JobType):
191                      if not os.path.exists(file):
192                          raise CrabException("Additional input file not found: "+file)
193                      pass
195                    # fname = string.split(file, '/')[-1]
196                    # storedFile = common.work_space.pathForTgz()+'share/'+fname
197                    # shutil.copyfile(file, storedFile)
194                      self.additional_inbox_files.append(string.strip(file))
195                  pass
196              pass
197 <            common.logger.debug(5,"Additional input files: "+str(self.additional_inbox_files))
198 <        except KeyError:
203 <            pass
204 <
205 <        # files per job
206 <        try:
207 <            if (cfg_params['CMSSW.files_per_jobs']):
208 <                raise CrabException("files_per_jobs no longer supported.  Quitting.")
209 <        except KeyError:
210 <            pass
211 <
212 <        ## Events per job
213 <        try:
214 <            self.eventsPerJob =int( cfg_params['CMSSW.events_per_job'])
215 <            self.selectEventsPerJob = 1
216 <        except KeyError:
217 <            self.eventsPerJob = -1
218 <            self.selectEventsPerJob = 0
219 <    
220 <        ## number of jobs
221 <        try:
222 <            self.theNumberOfJobs =int( cfg_params['CMSSW.number_of_jobs'])
223 <            self.selectNumberOfJobs = 1
224 <        except KeyError:
225 <            self.theNumberOfJobs = 0
226 <            self.selectNumberOfJobs = 0
227 <
228 <        try:
229 <            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
230 <            self.selectTotalNumberEvents = 1
231 <        except KeyError:
232 <            self.total_number_of_events = 0
233 <            self.selectTotalNumberEvents = 0
234 <
235 <        if self.pset != None: #CarlosDaniele
236 <             if ( (self.selectTotalNumberEvents + self.selectEventsPerJob + self.selectNumberOfJobs) != 2 ):
237 <                 msg = 'Must define exactly two of total_number_of_events, events_per_job, or number_of_jobs.'
238 <                 raise CrabException(msg)
239 <        else:
240 <             if (self.selectNumberOfJobs == 0):
241 <                 msg = 'Must specify  number_of_jobs.'
242 <                 raise CrabException(msg)
243 <
244 <        ## source seed for pythia
245 <        try:
246 <            self.sourceSeed = int(cfg_params['CMSSW.pythia_seed'])
247 <        except KeyError:
248 <            self.sourceSeed = None
249 <            common.logger.debug(5,"No seed given")
250 <
251 <        try:
252 <            self.sourceSeedVtx = int(cfg_params['CMSSW.vtx_seed'])
253 <        except KeyError:
254 <            self.sourceSeedVtx = None
255 <            common.logger.debug(5,"No vertex seed given")
256 <
257 <        try:
258 <            self.sourceSeedG4 = int(cfg_params['CMSSW.g4_seed'])
259 <        except KeyError:
260 <            self.sourceSeedG4 = None
261 <            common.logger.debug(5,"No g4 sim hits seed given")
197 >            common.logger.debug("Additional input files: "+str(self.additional_inbox_files))
198 >        pass
199  
263        try:
264            self.sourceSeedMix = int(cfg_params['CMSSW.mix_seed'])
265        except KeyError:
266            self.sourceSeedMix = None
267            common.logger.debug(5,"No mix seed given")
200  
201 <        try:
202 <            self.firstRun = int(cfg_params['CMSSW.first_run'])
203 <        except KeyError:
204 <            self.firstRun = None
205 <            common.logger.debug(5,"No first run given")
206 <        if self.pset != None: #CarlosDaniele
207 <            ver = string.split(self.version,"_")
208 <            if (int(ver[1])>=1 and int(ver[2])>=5):
209 <                import PsetManipulator150 as pp
201 >        ## New method of dealing with seeds
202 >        self.incrementSeeds = []
203 >        self.preserveSeeds = []
204 >        if cfg_params.has_key('CMSSW.preserve_seeds'):
205 >            tmpList = cfg_params['CMSSW.preserve_seeds'].split(',')
206 >            for tmp in tmpList:
207 >                tmp = tmp.strip()
208 >                self.preserveSeeds.append(tmp)
209 >        if cfg_params.has_key('CMSSW.increment_seeds'):
210 >            tmpList = cfg_params['CMSSW.increment_seeds'].split(',')
211 >            for tmp in tmpList:
212 >                tmp = tmp.strip()
213 >                self.incrementSeeds.append(tmp)
214 >
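For illustration only, this is how the comma-separated seed lists above would typically appear in the configuration and be turned into Python lists (the parameter values are made up; the whitespace is stripped explicitly, since str.strip() returns a new string rather than modifying in place):

    cfg = {'CMSSW.preserve_seeds':  'sourceSeed, theSource',
           'CMSSW.increment_seeds': 'generator, VtxSmeared'}

    preserve_seeds = []
    increment_seeds = []
    if 'CMSSW.preserve_seeds' in cfg:
        preserve_seeds = [s.strip() for s in cfg['CMSSW.preserve_seeds'].split(',')]
    if 'CMSSW.increment_seeds' in cfg:
        increment_seeds = [s.strip() for s in cfg['CMSSW.increment_seeds'].split(',')]

    print preserve_seeds    # ['sourceSeed', 'theSource']
    print increment_seeds   # ['generator', 'VtxSmeared']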
215 >        # Copy/return/publish
216 >        self.copy_data = int(cfg_params.get('USER.copy_data',0))
217 >        self.return_data = int(cfg_params.get('USER.return_data',0))
218 >        self.publish_data = int(cfg_params.get('USER.publish_data',0))
219 >        if (self.publish_data == 1):
220 >            if not cfg_params.has_key('USER.publish_data_name'):
221 >                raise CrabException('Cannot publish output data, because you did not specify USER.publish_data_name parameter in the crab.cfg file')
222              else:
223 <                import PsetManipulator as pp
280 <            PsetEdit = pp.PsetManipulator(self.pset) #Daniele Pset
223 >                self.processedDataset = cfg_params['USER.publish_data_name']
224  
225 +        self.conf = {}
226 +        self.conf['pubdata'] = None
227 +        # number of jobs requested to be created, limit obj splitting DD
228          #DBSDLS-start
229 <        ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
229 >        ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
230          self.maxEvents=0  # max events available   ( --> check the requested nb. of evts in Creator.py)
231          self.DBSPaths={}  # all dbs paths requested ( --> input to the site local discovery script)
232          self.jobDestination=[]  # Site destination(s) for each job (list of lists)
233          ## Perform the data location and discovery (based on DBS/DLS)
234          ## SL: Don't if NONE is specified as input (pythia use case)
235          blockSites = {}
236 <        if self.datasetPath:
237 <            blockSites = self.DataDiscoveryAndLocation(cfg_params)
238 <        #DBSDLS-end          
236 > #wmbs
237 >        self.automation = int(self.cfg_params.get('WMBS.automation',0))
238 >        if self.automation == 0:
239 >            if self.datasetPath:
240 >                blockSites = self.DataDiscoveryAndLocation(cfg_params)
241 >            #DBSDLS-end
242 >            self.conf['blockSites']=blockSites
243 >
244 >            ## Select Splitting
245 >            splitByRun = int(cfg_params.get('CMSSW.split_by_run',0))
246 >
247 >            if self.selectNoInput:
248 >                if self.pset == None:
249 >                    self.algo = 'ForScript'
250 >                else:
251 >                    self.algo = 'NoInput'
252 >                    self.conf['managedGenerators']=self.managedGenerators
253 >                    self.conf['generator']=self.generator
254 >            elif self.ads or self.lumiMask or self.lumiParams:
255 >                self.algo = 'LumiBased'
256 >                if splitByRun:
257 >                    msg = "Cannot combine split by run with lumi_mask, ADS, " \
258 >                          "or lumis_per_job. Use split by lumi mode instead."
259 >                    raise CrabException(msg)
260  
261 <        self.tgzNameWithPath = self.getTarBall(self.executable)
262 <    
296 <        ## Select Splitting
297 <        if self.selectNoInput:
298 <            if self.pset == None: #CarlosDaniele
299 <                self.jobSplittingForScript()
261 >            elif splitByRun ==1:
262 >                self.algo = 'RunBased'
263              else:
264 <                self.jobSplittingNoInput()
265 <        else:
303 <            self.jobSplittingByBlocks(blockSites)
264 >                self.algo = 'EventBased'
265 >            common.logger.debug("Job splitting method: %s" % self.algo)
266  
267 <        # modify Pset
268 <        if self.pset != None: #CarlosDaniele
269 <            try:
270 <                if (self.datasetPath): # standard job
271 <                    # allow to processa a fraction of events in a file
272 <                    PsetEdit.inputModule("INPUT")
273 <                    PsetEdit.maxEvent("INPUTMAXEVENTS")
274 <                    PsetEdit.skipEvent("INPUTSKIPEVENTS")
275 <                else:  # pythia like job
276 <                    PsetEdit.maxEvent(self.eventsPerJob)
277 <                    if (self.firstRun):
278 <                        PsetEdit.pythiaFirstRun("INPUTFIRSTRUN")  #First Run
279 <                    if (self.sourceSeed) :
280 <                        PsetEdit.pythiaSeed("INPUT")
281 <                        if (self.sourceSeedVtx) :
282 <                            PsetEdit.vtxSeed("INPUTVTX")
283 <                        if (self.sourceSeedG4) :
284 <                            PsetEdit.g4Seed("INPUTG4")
285 <                        if (self.sourceSeedMix) :
286 <                            PsetEdit.mixSeed("INPUTMIX")
287 <                # add FrameworkJobReport to parameter-set
288 <                PsetEdit.addCrabFJR(self.fjrFileName)
289 <                PsetEdit.psetWriter(self.configFilename())
290 <            except:
291 <                msg='Error while manipuliating ParameterSet: exiting...'
267 >            splitter = JobSplitter(self.cfg_params,self.conf)
268 >            self.dict = splitter.Algos()[self.algo]()
269 >
270 >        self.argsFile= '%s/arguments.xml'%common.work_space.shareDir()
271 >        self.rootArgsFilename= 'arguments'
272 >        # modify Pset only the first time
273 >        if isNew:
274 >            if self.pset != None: self.ModifyPset()
275 >
276 >            ## Prepare inputSandbox TarBall (only the first time)
277 >            self.tarNameWithPath = self.getTarBall(self.executable)
278 >
279 >
280 >    def ModifyPset(self):
281 >        import PsetManipulator as pp
282 >
283 >        # If pycfg_params set, fake out the config script
284 >        # to make it think it was called with those args
285 >        pycfg_params = self.cfg_params.get('CMSSW.pycfg_params',None)
286 >        if pycfg_params:
287 >            trueArgv = sys.argv
288 >            sys.argv = [self.pset]
289 >            sys.argv.extend(pycfg_params.split(' '))
290 >        PsetEdit = pp.PsetManipulator(self.pset)
291 >        if pycfg_params: # Restore original sys.argv
292 >            sys.argv = trueArgv
293 >
294 >        try:
295 >            # Add FrameworkJobReport to parameter-set, set max events.
296 >            # Reset later for data jobs by writeCFG which does all modifications
297 >            PsetEdit.maxEvent(1)
298 >            PsetEdit.skipEvent(0)
299 >            PsetEdit.psetWriter(self.configFilename())
300 >            ## If present, add TFileService to output files
301 >            if not int(self.cfg_params.get('CMSSW.skip_tfileservice_output',0)):
302 >                tfsOutput = PsetEdit.getTFileService()
303 >                if tfsOutput:
304 >                    if tfsOutput in self.output_file:
305 >                        common.logger.debug("Output from TFileService "+tfsOutput+" already in output files")
306 >                    else:
307 >                        outfileflag = True #output found
308 >                        self.output_file.append(tfsOutput)
309 >                        common.logger.info("Adding "+tfsOutput+" (from TFileService) to list of output files")
310 >                    pass
311 >                pass
312 >
313 >            # If requested, add PoolOutputModule to output files
314 >            ### FEDE FOR MULTI ###
315 >            #edmOutput = PsetEdit.getPoolOutputModule()
316 >            edmOutputDict = PsetEdit.getPoolOutputModule()
317 >            common.logger.debug("(test) edmOutputDict = "+str(edmOutputDict))
318 >            filter_dict = {}
319 >            for key in edmOutputDict.keys():
320 >                filter_dict[key]=edmOutputDict[key]['dataset']
321 >            common.logger.debug("(test) filter_dict for multi =  "+str(filter_dict))
322 >
323 >            #### in CMSSW.sh: export var_filter
324 >
325 >            self.var_filter = json.dumps(filter_dict)
326 >            common.logger.debug("(test) var_filter for multi =  "+self.var_filter)
327 >
328 >            edmOutput = edmOutputDict.keys()
329 >            if int(self.cfg_params.get('CMSSW.get_edm_output',0)):
330 >                if edmOutput:
331 >                    for outputFile in edmOutput:
332 >                        if outputFile in self.output_file:
333 >                            common.logger.debug("Output from PoolOutputModule "+outputFile+" already in output files")
334 >                        else:
335 >                            self.output_file.append(outputFile)
336 >                            common.logger.info("Adding "+outputFile+" (from PoolOutputModule) to list of output files")
337 >            # not requested, check anyhow to avoid accidental T2 overload
338 >            else:
339 >                if edmOutput:
340 >                    missedFiles = []
341 >                    for outputFile in edmOutput:
342 >                        if outputFile not in self.output_file:
343 >                            missedFiles.append(outputFile)
344 >                    if missedFiles:
345 >                        msg  = "ERROR: PoolOutputModule(s) are present in your ParameterSet %s \n"%self.pset
346 >                        msg += "    but the file(s) produced ( %s ) are not in the list of output files\n" % ', '.join(missedFiles)
347 >                        msg += "WARNING: please remove them. If you want to keep them, add the file(s) to output_files or use CMSSW.get_edm_output = 1\n"
348 >                        if int(self.cfg_params.get('CMSSW.ignore_edm_output',0)):
349 >                            msg += "    CMSSW.ignore_edm_output==1 : Hope you know what you are doing...\n"
350 >                            common.logger.info(msg)
351 >                        else :
352 >                            raise CrabException(msg)
353 >
354 >            if (PsetEdit.getBadFilesSetting()):
355 >                msg = "WARNING: You have set skipBadFiles to True. This will continue processing on some errors and you may not be notified."
356 >                common.logger.info(msg)
357 >
358 >        except CrabException, msg:
359 >            common.logger.info(str(msg))
360 >            msg='Error while manipulating ParameterSet (see previous message, if any): exiting...'
361 >            raise CrabException(msg)
362 >
363 >        valid = re.compile('^[\w\.\-]+$')
364 >        for fileName in self.output_file:
365 >            if not valid.match(fileName):
366 >                msg = "The file %s may only contain alphanumeric characters and -, _, ." % fileName
367                  raise CrabException(msg)
368  
369 +
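The output-file name check added above only admits word characters, dots and dashes; a quick standalone illustration:

    import re

    valid = re.compile(r'^[\w\.\-]+$')
    for name in ['histo_1.root', 'out-file.v2.root', 'bad name.root', 'evil;rm.root']:
        print name, bool(valid.match(name))
    # histo_1.root True
    # out-file.v2.root True
    # bad name.root False
    # evil;rm.root False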
370      def DataDiscoveryAndLocation(self, cfg_params):
371  
372          import DataDiscovery
335        import DataDiscovery_DBS2
373          import DataLocation
374 <        common.logger.debug(10,"CMSSW::DataDiscoveryAndLocation()")
374 >        common.logger.log(10-1,"CMSSW::DataDiscoveryAndLocation()")
375  
376          datasetPath=self.datasetPath
377  
378          ## Contact the DBS
379 <        common.logger.message("Contacting Data Discovery Services ...")
379 >        common.logger.info("Contacting Data Discovery Services ...")
380          try:
381 <
345 <            if self.use_dbs_1 == 1 :
346 <                self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params)
347 <            else :
348 <                self.pubdata=DataDiscovery_DBS2.DataDiscovery_DBS2(datasetPath, cfg_params)
381 >            self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params,self.skip_blocks)
382              self.pubdata.fetchDBSInfo()
383  
384          except DataDiscovery.NotExistingDatasetError, ex :
# Line 357 | Line 390 | class Cmssw(JobType):
390          except DataDiscovery.DataDiscoveryError, ex:
391              msg = 'ERROR ***: failed Data Discovery in DBS :  %s'%ex.getErrorMessage()
392              raise CrabException(msg)
360        except DataDiscovery_DBS2.NotExistingDatasetError_DBS2, ex :
361            msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
362            raise CrabException(msg)
363        except DataDiscovery_DBS2.NoDataTierinProvenanceError_DBS2, ex :
364            msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
365            raise CrabException(msg)
366        except DataDiscovery_DBS2.DataDiscoveryError_DBS2, ex:
367            msg = 'ERROR ***: failed Data Discovery in DBS :  %s'%ex.getErrorMessage()
368            raise CrabException(msg)
393  
394          self.filesbyblock=self.pubdata.getFiles()
395 <        self.eventsbyblock=self.pubdata.getEventsPerBlock()
372 <        self.eventsbyfile=self.pubdata.getEventsPerFile()
395 >        self.conf['pubdata']=self.pubdata
396  
397          ## get max number of events
398 <        self.maxEvents=self.pubdata.getMaxEvents() ##  self.maxEvents used in Creator.py
398 >        self.maxEvents=self.pubdata.getMaxEvents()
399  
400          ## Contact the DLS and build a list of sites hosting the fileblocks
401          try:
402              dataloc=DataLocation.DataLocation(self.filesbyblock.keys(),cfg_params)
403              dataloc.fetchDLSInfo()
404 +
405          except DataLocation.DataLocationError , ex:
406              msg = 'ERROR ***: failed Data Location in DLS \n %s '%ex.getErrorMessage()
407              raise CrabException(msg)
384        
408  
409 <        sites = dataloc.getSites()
409 >
410 >        unsorted_sites = dataloc.getSites()
411 >        sites = self.filesbyblock.fromkeys(self.filesbyblock,'')
412 >        for lfn in self.filesbyblock.keys():
413 >            if unsorted_sites.has_key(lfn):
414 >                sites[lfn]=unsorted_sites[lfn]
415 >            else:
416 >                sites[lfn]=[]
417 >
418 >        if len(sites)==0:
419 >            msg = 'ERROR ***: no location for any of the blocks of this dataset: \n\t %s \n'%datasetPath
420 >            msg += "\tMaybe the dataset is located only at T1's (or at T0), where analysis jobs are not allowed\n"
421 >            msg += "\tPlease check DataDiscovery page https://cmsweb.cern.ch/dbs_discovery/\n"
422 >            raise CrabException(msg)
423 >
424          allSites = []
425          listSites = sites.values()
426          for listSite in listSites:
427              for oneSite in listSite:
428                  allSites.append(oneSite)
429 <        allSites = self.uniquelist(allSites)
429 >        [allSites.append(it) for it in allSites if not allSites.count(it)]
430 >
431  
432          # screen output
433 <        common.logger.message("Requested dataset: " + datasetPath + " has " + str(self.maxEvents) + " events in " + str(len(self.filesbyblock.keys())) + " blocks.\n")
433 >        if self.ads or self.lumiMask:
434 >            common.logger.info("Requested (A)DS %s has %s block(s)." %
435 >                               (datasetPath, len(self.filesbyblock.keys())))
436 >        else:
437 >            common.logger.info("Requested dataset: " + datasetPath + \
438 >                " has " + str(self.maxEvents) + " events in " + \
439 >                str(len(self.filesbyblock.keys())) + " blocks.\n")
440  
441          return sites
398    
399    def jobSplittingByBlocks(self, blockSites):
400        """
401        Perform job splitting. Jobs run over an integer number of files
402        and no more than one block.
403        ARGUMENT: blockSites: dictionary with blocks as keys and list of host sites as values
404        REQUIRES: self.selectTotalNumberEvents, self.selectEventsPerJob, self.selectNumberofJobs,
405                  self.total_number_of_events, self.eventsPerJob, self.theNumberOfJobs,
406                  self.maxEvents, self.filesbyblock
407        SETS: self.jobDestination - Site destination(s) for each job (a list of lists)
408              self.total_number_of_jobs - Total # of jobs
409              self.list_of_args - File(s) job will run on (a list of lists)
410        """
411
412        # ---- Handle the possible job splitting configurations ---- #
413        if (self.selectTotalNumberEvents):
414            totalEventsRequested = self.total_number_of_events
415        if (self.selectEventsPerJob):
416            eventsPerJobRequested = self.eventsPerJob
417            if (self.selectNumberOfJobs):
418                totalEventsRequested = self.theNumberOfJobs * self.eventsPerJob
419
420        # If user requested all the events in the dataset
421        if (totalEventsRequested == -1):
422            eventsRemaining=self.maxEvents
423        # If user requested more events than are in the dataset
424        elif (totalEventsRequested > self.maxEvents):
425            eventsRemaining = self.maxEvents
426            common.logger.message("Requested "+str(self.total_number_of_events)+ " events, but only "+str(self.maxEvents)+" events are available.")
427        # If user requested less events than are in the dataset
428        else:
429            eventsRemaining = totalEventsRequested
442  
431        # If user requested more events per job than are in the dataset
432        if (self.selectEventsPerJob and eventsPerJobRequested > self.maxEvents):
433            eventsPerJobRequested = self.maxEvents
434
435        # For user info at end
436        totalEventCount = 0
437
438        if (self.selectTotalNumberEvents and self.selectNumberOfJobs):
439            eventsPerJobRequested = int(eventsRemaining/self.theNumberOfJobs)
440
441        if (self.selectNumberOfJobs):
442            common.logger.message("May not create the exact number_of_jobs requested.")
443
444        if ( self.ncjobs == 'all' ) :
445            totalNumberOfJobs = 999999999
446        else :
447            totalNumberOfJobs = self.ncjobs
448            
449
450        blocks = blockSites.keys()
451        blockCount = 0
452        # Backup variable in case self.maxEvents counted events in a non-included block
453        numBlocksInDataset = len(blocks)
454
455        jobCount = 0
456        list_of_lists = []
457
458        # list tracking which jobs are in which jobs belong to which block
459        jobsOfBlock = {}
460
461        # ---- Iterate over the blocks in the dataset until ---- #
462        # ---- we've met the requested total # of events    ---- #
463        while ( (eventsRemaining > 0) and (blockCount < numBlocksInDataset) and (jobCount < totalNumberOfJobs)):
464            block = blocks[blockCount]
465            blockCount += 1
466            if block not in jobsOfBlock.keys() :
467                jobsOfBlock[block] = []
468            
469            if self.eventsbyblock.has_key(block) :
470                numEventsInBlock = self.eventsbyblock[block]
471                common.logger.debug(5,'Events in Block File '+str(numEventsInBlock))
472            
473                files = self.filesbyblock[block]
474                numFilesInBlock = len(files)
475                if (numFilesInBlock <= 0):
476                    continue
477                fileCount = 0
443  
444 <                # ---- New block => New job ---- #
480 <                parString = "\\{"
481 <                # counter for number of events in files currently worked on
482 <                filesEventCount = 0
483 <                # flag if next while loop should touch new file
484 <                newFile = 1
485 <                # job event counter
486 <                jobSkipEventCount = 0
487 <            
488 <                # ---- Iterate over the files in the block until we've met the requested ---- #
489 <                # ---- total # of events or we've gone over all the files in this block  ---- #
490 <                while ( (eventsRemaining > 0) and (fileCount < numFilesInBlock) and (jobCount < totalNumberOfJobs) ):
491 <                    file = files[fileCount]
492 <                    if newFile :
493 <                        try:
494 <                            numEventsInFile = self.eventsbyfile[file]
495 <                            common.logger.debug(6, "File "+str(file)+" has "+str(numEventsInFile)+" events")
496 <                            # increase filesEventCount
497 <                            filesEventCount += numEventsInFile
498 <                            # Add file to current job
499 <                            parString += '\\\"' + file + '\\\"\,'
500 <                            newFile = 0
501 <                        except KeyError:
502 <                            common.logger.message("File "+str(file)+" has unknown number of events: skipping")
503 <                        
504 <
505 <                    # if less events in file remain than eventsPerJobRequested
506 <                    if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested ) :
507 <                        # if last file in block
508 <                        if ( fileCount == numFilesInBlock-1 ) :
509 <                            # end job using last file, use remaining events in block
510 <                            # close job and touch new file
511 <                            fullString = parString[:-2]
512 <                            fullString += '\\}'
513 <                            list_of_lists.append([fullString,str(-1),str(jobSkipEventCount)])
514 <                            common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(filesEventCount - jobSkipEventCount)+" events (last file in block).")
515 <                            self.jobDestination.append(blockSites[block])
516 <                            common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
517 <                            # fill jobs of block dictionary
518 <                            jobsOfBlock[block].append(jobCount+1)
519 <                            # reset counter
520 <                            jobCount = jobCount + 1
521 <                            totalEventCount = totalEventCount + filesEventCount - jobSkipEventCount
522 <                            eventsRemaining = eventsRemaining - filesEventCount + jobSkipEventCount
523 <                            jobSkipEventCount = 0
524 <                            # reset file
525 <                            parString = "\\{"
526 <                            filesEventCount = 0
527 <                            newFile = 1
528 <                            fileCount += 1
529 <                        else :
530 <                            # go to next file
531 <                            newFile = 1
532 <                            fileCount += 1
533 <                    # if events in file equal to eventsPerJobRequested
534 <                    elif ( filesEventCount - jobSkipEventCount == eventsPerJobRequested ) :
535 <                        # close job and touch new file
536 <                        fullString = parString[:-2]
537 <                        fullString += '\\}'
538 <                        list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
539 <                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
540 <                        self.jobDestination.append(blockSites[block])
541 <                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
542 <                        jobsOfBlock[block].append(jobCount+1)
543 <                        # reset counter
544 <                        jobCount = jobCount + 1
545 <                        totalEventCount = totalEventCount + eventsPerJobRequested
546 <                        eventsRemaining = eventsRemaining - eventsPerJobRequested
547 <                        jobSkipEventCount = 0
548 <                        # reset file
549 <                        parString = "\\{"
550 <                        filesEventCount = 0
551 <                        newFile = 1
552 <                        fileCount += 1
553 <                        
554 <                    # if more events in file remain than eventsPerJobRequested
555 <                    else :
556 <                        # close job but don't touch new file
557 <                        fullString = parString[:-2]
558 <                        fullString += '\\}'
559 <                        list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
560 <                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
561 <                        self.jobDestination.append(blockSites[block])
562 <                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
563 <                        jobsOfBlock[block].append(jobCount+1)
564 <                        # increase counter
565 <                        jobCount = jobCount + 1
566 <                        totalEventCount = totalEventCount + eventsPerJobRequested
567 <                        eventsRemaining = eventsRemaining - eventsPerJobRequested
568 <                        # calculate skip events for last file
569 <                        # use filesEventCount (contains several files), jobSkipEventCount and eventsPerJobRequest
570 <                        jobSkipEventCount = eventsPerJobRequested - (filesEventCount - jobSkipEventCount - self.eventsbyfile[file])
571 <                        # remove all but the last file
572 <                        filesEventCount = self.eventsbyfile[file]
573 <                        parString = "\\{"
574 <                        parString += '\\\"' + file + '\\\"\,'
575 <                    pass # END if
576 <                pass # END while (iterate over files in the block)
577 <        pass # END while (iterate over blocks in the dataset)
578 <        self.ncjobs = self.total_number_of_jobs = jobCount
579 <        if (eventsRemaining > 0 and jobCount < totalNumberOfJobs ):
580 <            common.logger.message("Could not run on all requested events because some blocks not hosted at allowed sites.")
581 <        common.logger.message(str(jobCount)+" job(s) can run on "+str(totalEventCount)+" events.\n")
582 <        
583 <        # screen output
584 <        screenOutput = "List of jobs and available destination sites:\n\n"
444 >    def split(self, jobParams,firstJobID):
445  
446 <        # keep trace of block with no sites to print a warning at the end
447 <        noSiteBlock = []
448 <        bloskNoSite = []
449 <
450 <        blockCounter = 0
451 <        for block in blocks:
452 <            if block in jobsOfBlock.keys() :
453 <                blockCounter += 1
594 <                screenOutput += "Block %5i: jobs %20s: sites: %s\n" % (blockCounter,spanRanges(jobsOfBlock[block]),','.join(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)))
595 <                if len(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)) == 0:
596 <                    noSiteBlock.append( spanRanges(jobsOfBlock[block]) )
597 <                    bloskNoSite.append( blockCounter )
598 <        
599 <        common.logger.message(screenOutput)
600 <        if len(noSiteBlock) > 0 and len(bloskNoSite) > 0:
601 <            msg = 'WARNING: No sites are hosting any part of data for block:\n                '
602 <            virgola = ""
603 <            if len(bloskNoSite) > 1:
604 <                virgola = ","
605 <            for block in bloskNoSite:
606 <                msg += ' ' + str(block) + virgola
607 <            msg += '\n               Related jobs:\n                 '
608 <            virgola = ""
609 <            if len(noSiteBlock) > 1:
610 <                virgola = ","
611 <            for range_jobs in noSiteBlock:
612 <                msg += str(range_jobs) + virgola
613 <            msg += '\n               will not be submitted and this block of data can not be analyzed!\n'
614 <            common.logger.message(msg)
446 >        jobParams = self.dict['args']
447 >        njobs = self.dict['njobs']
448 >        self.jobDestination = self.dict['jobDestination']
449 >
450 >        if njobs == 0:
451 >            raise CrabException("Asked to split zero jobs: aborting")
452 >        if not self.server and not self.local and njobs > 500:
453 >            raise CrabException("The CRAB client will not submit more than 500 jobs. You must use the server mode.")
454  
455 <        self.list_of_args = list_of_lists
455 >        # create the empty structure
456 >        for i in range(njobs):
457 >            jobParams.append("")
458 >
459 >        listID=[]
460 >        listField=[]
461 >        listDictions=[]
462 >        exist= os.path.exists(self.argsFile)
463 >        for id in range(njobs):
464 >            job = id + int(firstJobID)
465 >            listID.append(job+1)
466 >            job_ToSave ={}
467 >            concString = ' '
468 >            argu=''
469 >            str_argu = str(job+1)
470 >            if len(jobParams[id]):
471 >                argu = {'JobID': job+1}
472 >                for i in range(len(jobParams[id])):
473 >                    argu[self.dict['params'][i]]=jobParams[id][i]
474 >                    if len(jobParams[id])==1: self.NumEvents = jobParams[id][i]
475 >                # just for debug
476 >                str_argu += concString.join(jobParams[id])
477 >            if argu != '': listDictions.append(argu)
478 >            job_ToSave['arguments']= '%d %d'%( (job+1), 0)
479 >            job_ToSave['dlsDestination']= self.jobDestination[id]
480 >            listField.append(job_ToSave)
481 >            from ProdCommon.SiteDB.CmsSiteMapper import CmsSEMap
482 >            cms_se = CmsSEMap()
483 >            msg="Job  %s  Arguments:  %s\n"%(str(job+1),str_argu)
484 >            msg+="\t  Destination: %s "%(str(self.jobDestination[id]))
485 >            SEDestination = [cms_se[dest] for dest in self.jobDestination[id]]
486 >            msg+="\t  CMSDestination: %s "%(str(SEDestination))
487 >            common.logger.log(10-1,msg)
488 >        # write xml
489 >        if len(listDictions):
490 >            if exist==False: self.CreateXML()
491 >            self.addEntry(listDictions)
492 >        common._db.updateJob_(listID,listField)
493          return
494  
495 <    def jobSplittingNoInput(self):
495 >    def CreateXML(self):
496          """
621        Perform job splitting based on number of event per job
497          """
498 <        common.logger.debug(5,'Splitting per events')
499 <        
625 <        if (self.selectEventsPerJob):
626 <            common.logger.message('Required '+str(self.eventsPerJob)+' events per job ')
627 <        if (self.selectNumberOfJobs):
628 <            common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
629 <        if (self.selectTotalNumberEvents):
630 <            common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
631 <
632 <        if (self.total_number_of_events < 0):
633 <            msg='Cannot split jobs per Events with "-1" as total number of events'
634 <            raise CrabException(msg)
635 <
636 <        if (self.selectEventsPerJob):
637 <            if (self.selectTotalNumberEvents):
638 <                self.total_number_of_jobs = int(self.total_number_of_events/self.eventsPerJob)
639 <            elif(self.selectNumberOfJobs) :  
640 <                self.total_number_of_jobs =self.theNumberOfJobs
641 <                self.total_number_of_events =int(self.theNumberOfJobs*self.eventsPerJob)
642 <
643 <        elif (self.selectNumberOfJobs) :
644 <            self.total_number_of_jobs = self.theNumberOfJobs
645 <            self.eventsPerJob = int(self.total_number_of_events/self.total_number_of_jobs)
646 <
647 <        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
648 <
649 <        # is there any remainder?
650 <        check = int(self.total_number_of_events) - (int(self.total_number_of_jobs)*self.eventsPerJob)
651 <
652 <        common.logger.debug(5,'Check  '+str(check))
653 <
654 <        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created, each for '+str(self.eventsPerJob)+' for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
655 <        if check > 0:
656 <            common.logger.message('Warning: asked '+str(self.total_number_of_events)+' but can do only '+str(int(self.total_number_of_jobs)*self.eventsPerJob))
657 <
658 <        # argument is seed number.$i
659 <        self.list_of_args = []
660 <        for i in range(self.total_number_of_jobs):
661 <            ## Since there is no input, any site is good
662 <           # self.jobDestination.append(["Any"])
663 <            self.jobDestination.append([""]) #must be empty to write correctly the xml
664 <            args=[]
665 <            if (self.firstRun):
666 <                    ## pythia first run
667 <                #self.list_of_args.append([(str(self.firstRun)+str(i))])
668 <                args.append(str(self.firstRun)+str(i))
669 <            else:
670 <                ## no first run
671 <                #self.list_of_args.append([str(i)])
672 <                args.append(str(i))
673 <            if (self.sourceSeed):
674 <                args.append(str(self.sourceSeed)+str(i))
675 <                if (self.sourceSeedVtx):
676 <                    ## + vtx random seed
677 <                    args.append(str(self.sourceSeedVtx)+str(i))
678 <                if (self.sourceSeedG4):
679 <                    ## + G4 random seed
680 <                    args.append(str(self.sourceSeedG4)+str(i))
681 <                if (self.sourceSeedMix):    
682 <                    ## + Mix random seed
683 <                    args.append(str(self.sourceSeedMix)+str(i))
684 <                pass
685 <            pass
686 <            self.list_of_args.append(args)
687 <        pass
688 <            
689 <        # print self.list_of_args
690 <
498 >        result = IMProvNode( self.rootArgsFilename )
499 >        outfile = file( self.argsFile, 'w').write(str(result))
500          return
501  
502 <
694 <    def jobSplittingForScript(self):#CarlosDaniele
695 <        """
696 <        Perform job splitting based on number of job
502 >    def addEntry(self, listDictions):
503          """
504 <        common.logger.debug(5,'Splitting per job')
699 <        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
700 <
701 <        self.total_number_of_jobs = self.theNumberOfJobs
702 <
703 <        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
504 >        _addEntry_
505  
506 <        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created')
507 <
508 <        # argument is seed number.$i
509 <        self.list_of_args = []
510 <        for i in range(self.total_number_of_jobs):
511 <            ## Since there is no input, any site is good
512 <           # self.jobDestination.append(["Any"])
513 <            self.jobDestination.append([""])
514 <            ## no random seed
714 <            self.list_of_args.append([str(i)])
506 >        add an entry to the xml file
507 >        """
508 >        ## load xml
509 >        improvDoc = loadIMProvFile(self.argsFile)
510 >        entrname= 'Job'
511 >        for dictions in listDictions:
512 >           report = IMProvNode(entrname , None, **dictions)
513 >           improvDoc.addNode(report)
514 >        outfile = file( self.argsFile, 'w').write(str(improvDoc))
515          return
516  
717    def split(self, jobParams):
718
719        common.jobDB.load()
720        #### Fabio
721        njobs = self.total_number_of_jobs
722        arglist = self.list_of_args
723        # create the empty structure
724        for i in range(njobs):
725            jobParams.append("")
726        
727        for job in range(njobs):
728            jobParams[job] = arglist[job]
729            # print str(arglist[job])
730            # print jobParams[job]
731            common.jobDB.setArguments(job, jobParams[job])
732            common.logger.debug(5,"Job "+str(job)+" Destination: "+str(self.jobDestination[job]))
733            common.jobDB.setDestination(job, self.jobDestination[job])
734
735        common.jobDB.save()
736        return
737    
738    def getJobTypeArguments(self, nj, sched):
739        result = ''
740        for i in common.jobDB.arguments(nj):
741            result=result+str(i)+" "
742        return result
743  
517      def numberOfJobs(self):
518 <        # Fabio
519 <        return self.total_number_of_jobs
518 > #wmbs
519 >        if self.automation==0:
520 >           return self.dict['njobs']
521 >        else:
522 >           return None
523  
524      def getTarBall(self, exe):
525          """
526          Return the TarBall with lib and exe
527          """
528 <        
753 <        # if it exist, just return it
754 <        #
755 <        # Marco. Let's start to use relative path for Boss XML files
756 <        #
757 <        self.tgzNameWithPath = common.work_space.pathForTgz()+'share/'+self.tgz_name
528 >        self.tgzNameWithPath = common.work_space.pathForTgz()+self.tgz_name
529          if os.path.exists(self.tgzNameWithPath):
530              return self.tgzNameWithPath
531  
# Line 767 | Line 538 | class Cmssw(JobType):
538  
539          # First of all declare the user Scram area
540          swArea = self.scram.getSWArea_()
770        #print "swArea = ", swArea
771        # swVersion = self.scram.getSWVersion()
772        # print "swVersion = ", swVersion
541          swReleaseTop = self.scram.getReleaseTop_()
542 <        #print "swReleaseTop = ", swReleaseTop
775 <        
542 >
543          ## check if working area is release top
544          if swReleaseTop == '' or swArea == swReleaseTop:
545 +            common.logger.debug("swArea = "+swArea+" swReleaseTop ="+swReleaseTop)
546              return
547  
548          import tarfile
# Line 785 | Line 553 | class Cmssw(JobType):
553                  exeWithPath = self.scram.findFile_(executable)
554                  if ( not exeWithPath ):
555                      raise CrabException('User executable '+executable+' not found')
556 <    
556 >
557                  ## then check if it's private or not
558                  if exeWithPath.find(swReleaseTop) == -1:
559                      # the exe is private, so we must ship
560 <                    common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
560 >                    common.logger.debug("Exe "+exeWithPath+" to be tarred")
561                      path = swArea+'/'
562                      # distinguish case when script is in user project area or given by full path somewhere else
563                      if exeWithPath.find(path) >= 0 :
# Line 801 | Line 569 | class Cmssw(JobType):
569                  else:
570                      # the exe is from release, we'll find it on WN
571                      pass
572 <    
572 >
573              ## Now get the libraries: only those in local working area
574 +            tar.dereference=True
575              libDir = 'lib'
576              lib = swArea+'/' +libDir
577 <            common.logger.debug(5,"lib "+lib+" to be tarred")
577 >            common.logger.debug("lib "+lib+" to be tarred")
578              if os.path.exists(lib):
579                  tar.add(lib,libDir)
580 <    
580 >
581              ## Now check if module dir is present
582              moduleDir = 'module'
583              module = swArea + '/' + moduleDir
584              if os.path.isdir(module):
585                  tar.add(module,moduleDir)
586 +            tar.dereference=False
587  
588              ## Now check if any data dir(s) is present
589 <            swAreaLen=len(swArea)
590 <            for root, dirs, files in os.walk(swArea):
591 <                if "data" in dirs:
592 <                    common.logger.debug(5,"data "+root+"/data"+" to be tarred")
593 <                    tar.add(root+"/data",root[swAreaLen:]+"/data")
594 <
595 <            ## Add ProdAgent dir to tar
596 <            paDir = 'ProdAgentApi'
597 <            pa = os.environ['CRABDIR'] + '/' + 'ProdAgentApi'
598 <            if os.path.isdir(pa):
599 <                tar.add(pa,paDir)
600 <
601 <            ### FEDE FOR DBS PUBLICATION
602 <            ## Add PRODCOMMON dir to tar
603 <            prodcommonDir = 'ProdCommon'
604 <            prodcommonPath = os.environ['CRABDIR'] + '/' + 'ProdCommon'
605 <            if os.path.isdir(prodcommonPath):
606 <                tar.add(prodcommonPath,prodcommonDir)
607 <            #############################    
608 <        
609 <            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
589 >            self.dataExist = False
590 >            todo_list = [(i, i) for i in  os.listdir(swArea+"/src")]
591 >            while len(todo_list):
592 >                entry, name = todo_list.pop()
593 >                if name.startswith('crab_0_') or  name.startswith('.') or name == 'CVS':
594 >                    continue
595 >                if os.path.isdir(swArea+"/src/"+entry):
596 >                    entryPath = entry + '/'
597 >                    todo_list += [(entryPath + i, i) for i in  os.listdir(swArea+"/src/"+entry)]
598 >                    if name == 'data':
599 >                        self.dataExist=True
600 >                        common.logger.debug("data "+entry+" to be tarred")
601 >                        tar.add(swArea+"/src/"+entry,"src/"+entry)
602 >                    pass
603 >                pass
604 >
605 >            ### CMSSW ParameterSet
606 >            if not self.pset is None:
607 >                cfg_file = common.work_space.jobDir()+self.configFilename()
608 >                pickleFile = common.work_space.jobDir()+self.configFilename() + '.pkl'
609 >                tar.add(cfg_file,self.configFilename())
610 >                tar.add(pickleFile,self.configFilename() + '.pkl')
611 >
612 >            try:
613 >                crab_cfg_file = common.work_space.shareDir()+'/crab.cfg'
614 >                tar.add(crab_cfg_file,'crab.cfg')
615 >            except:
616 >                pass
617 >
618 >            ## Add ProdCommon dir to tar
619 >            prodcommonDir = './'
620 >            prodcommonPath = os.environ['CRABDIR'] + '/' + 'external/'
621 >            neededStuff = ['ProdCommon/__init__.py','ProdCommon/FwkJobRep', 'ProdCommon/CMSConfigTools', \
622 >                           'ProdCommon/Core', 'ProdCommon/MCPayloads', 'IMProv', 'ProdCommon/Storage', \
623 >                           'WMCore/__init__.py','WMCore/Algorithms']
624 >            for file in neededStuff:
625 >                tar.add(prodcommonPath+file,prodcommonDir+file)
626 >
627 >            ##### ML stuff
628 >            ML_file_list=['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py']
629 >            path=os.environ['CRABDIR'] + '/python/'
630 >            for file in ML_file_list:
631 >                tar.add(path+file,file)
632 >
633 >            ##### Utils
634 >            Utils_file_list=['parseCrabFjr.py','writeCfg.py', 'fillCrabFjr.py','cmscp.py']
635 >            for file in Utils_file_list:
636 >                tar.add(path+file,file)
637 >
638 >            ##### AdditionalFiles
639 >            tar.dereference=True
640 >            for file in self.additional_inbox_files:
641 >                tar.add(file,string.split(file,'/')[-1])
642 >            tar.dereference=False
643 >            common.logger.log(10-1,"Files in "+self.tgzNameWithPath+" : "+str(tar.getnames()))
644 >
645              tar.close()
646 <        except :
647 <            raise CrabException('Could not create tar-ball')
646 >        except IOError, exc:
647 >            msg = 'Could not create tar-ball %s \n'%self.tgzNameWithPath
648 >            msg += str(exc)
649 >            raise CrabException(msg)
650 >        except tarfile.TarError, exc:
651 >            msg = 'Could not create tar-ball %s \n'%self.tgzNameWithPath
652 >            msg += str(exc)
653 >            raise CrabException(msg)
654  
844        ## check for tarball size
655          tarballinfo = os.stat(self.tgzNameWithPath)
656          if ( tarballinfo.st_size > self.MaxTarBallSize*1024*1024 ) :
657 <            raise CrabException('Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) + ' MB input sandbox limit and not supported by the used GRID submission system. Please make sure that no unnecessary files are in all data directories in your local CMSSW project area as they are automatically packed into the input sandbox.')
657 >            if not self.server:
658 >                msg  = 'Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + \
659 >                         str(self.MaxTarBallSize) +'MB input sandbox limit \n'
660 >                msg += '      and not supported by the direct GRID submission system.\n'
661 >                msg += '      Please use the CRAB server mode by setting server_name=<NAME> in section [CRAB] of your crab.cfg.\n'
662 >                msg += '      For further info please see https://twiki.cern.ch/twiki/bin/view/CMS/SWGuideCrabServerForUsers#Server_available_for_users'
663 >            else:
664 >                msg  = 'Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' +  \
665 >                        str(self.MaxTarBallSize) +'MB input sandbox limit in the server.'
666 >            raise CrabException(msg)
667  
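The sandbox-limit test above is a plain byte-count comparison against MaxTarBallSize (9.5 MB by default for direct submission, much larger when a CRAB server or a local scheduler is used, per the constructor). A minimal sketch of the arithmetic, with a placeholder tarball path:

    import os

    MAX_TARBALL_MB = 9.5                             # direct-submission default
    tgz = '/path/to/crab_0_x/share/default.tgz'      # placeholder path

    size_bytes = os.stat(tgz).st_size
    size_mb = float(size_bytes) / 1024.0 / 1024.0
    if size_bytes > MAX_TARBALL_MB * 1024 * 1024:
        # same condition that triggers the CrabException above
        raise RuntimeError('Input sandbox of %.1f MB exceeds the %.1f MB limit'
                           % (size_mb, MAX_TARBALL_MB))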
668          ## create tar-ball with ML stuff
850        self.MLtgzfile =  common.work_space.pathForTgz()+'share/MLfiles.tgz'
851        try:
852            tar = tarfile.open(self.MLtgzfile, "w:gz")
853            path=os.environ['CRABDIR'] + '/python/'
854            for file in ['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py', 'parseCrabFjr.py']:
855                tar.add(path+file,file)
856            common.logger.debug(5,"Files added to "+self.MLtgzfile+" : "+str(tar.getnames()))
857            tar.close()
858        except :
859            raise CrabException('Could not create ML files tar-ball')
860        
861        return
862        
863    def additionalInputFileTgz(self):
864        """
865        Put all additional files into a tar ball and return its name
866        """
867        import tarfile
868        tarName=  common.work_space.pathForTgz()+'share/'+self.additional_tgz_name
869        tar = tarfile.open(tarName, "w:gz")
870        for file in self.additional_inbox_files:
871            tar.add(file,string.split(file,'/')[-1])
872        common.logger.debug(5,"Files added to "+self.additional_tgz_name+" : "+str(tar.getnames()))
873        tar.close()
874        return tarName
669  
670 <    def wsSetupEnvironment(self, nj):
670 >    def wsSetupEnvironment(self, nj=0):
671          """
672          Returns part of a job script which prepares
673          the execution environment for the job 'nj'.
674          """
675 <        # Prepare JobType-independent part
882 <        txt = ''
883 <  
884 <        ## OLI_Daniele at this level  middleware already known
675 >        psetName = 'pset.py'
676  
677 <        txt += 'if [ $middleware == LCG ]; then \n'
678 <        txt += '    echo "### First set SCRAM ARCH and BUILD_ARCH to ' + self.executable_arch + ' ###"\n'
679 <        txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
680 <        txt += '    export BUILD_ARCH='+self.executable_arch+'\n'
677 >        # Prepare JobType-independent part
678 >        txt = '\n#Written by cms_cmssw::wsSetupEnvironment\n'
679 >        txt += 'echo ">>> setup environment"\n'
680 >        txt += 'echo "set SCRAM ARCH to ' + self.executable_arch + '"\n'
681 >        txt += 'export SCRAM_ARCH=' + self.executable_arch + '\n'
682 >        txt += 'echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
683 >        txt += 'if [ $middleware == LCG ] || [ $middleware == CAF ] || [ $middleware == LSF ]; then \n'
684          txt += self.wsSetupCMSLCGEnvironment_()
685          txt += 'elif [ $middleware == OSG ]; then\n'
686          txt += '    WORKING_DIR=`/bin/mktemp  -d $OSG_WN_TMP/cms_XXXXXXXXXXXX`\n'
687 <        txt += '    echo "Created working directory: $WORKING_DIR"\n'
688 <        txt += '    if [ ! -d $WORKING_DIR ] ;then\n'
689 <        txt += '        echo "SET_CMS_ENV 10016 ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
690 <        txt += '    echo "JOB_EXIT_STATUS = 10016"\n'
897 <        txt += '    echo "JobExitCode=10016" | tee -a $RUNTIME_AREA/$repo\n'
898 <        txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
899 <        txt += '        rm -f $RUNTIME_AREA/$repo \n'
900 <        txt += '        echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
901 <        txt += '        echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
902 <        txt += '        exit 1\n'
687 >        txt += '    if [ ! $? == 0 ] ;then\n'
688 >        txt += '        echo "ERROR ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
689 >        txt += '        job_exit_code=10016\n'
690 >        txt += '        func_exit\n'
691          txt += '    fi\n'
692 +        txt += '    echo ">>> Created working directory: $WORKING_DIR"\n'
693          txt += '\n'
694          txt += '    echo "Change to working directory: $WORKING_DIR"\n'
695          txt += '    cd $WORKING_DIR\n'
696 <        txt += self.wsSetupCMSOSGEnvironment_()
697 <        txt += '    echo "### Set SCRAM ARCH to ' + self.executable_arch + ' ###"\n'
698 <        txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
696 >        txt += '    echo ">>> current directory (WORKING_DIR): $WORKING_DIR"\n'
697 >        txt += self.wsSetupCMSOSGEnvironment_()
698 >        #Setup SGE Environment
699 >        txt += 'elif [ $middleware == SGE ]; then\n'
700 >        txt += self.wsSetupCMSLCGEnvironment_()
701 >
702 >        txt += 'elif [ $middleware == ARC ]; then\n'
703 >        txt += self.wsSetupCMSLCGEnvironment_()
704 >
705 >        #Setup PBS Environment
706 >        txt += 'elif [ $middleware == PBS ]; then\n'
707 >        txt += self.wsSetupCMSLCGEnvironment_()
708 >
709          txt += 'fi\n'
710  
711          # Prepare JobType-specific part
712          scram = self.scram.commandName()
713          txt += '\n\n'
714 <        txt += 'echo "### SPECIFIC JOB SETUP ENVIRONMENT ###"\n'
714 >        txt += 'echo ">>> specific cmssw setup environment:"\n'
715 >        txt += 'echo "CMSSW_VERSION =  '+self.version+'"\n'
716          txt += scram+' project CMSSW '+self.version+'\n'
717          txt += 'status=$?\n'
718          txt += 'if [ $status != 0 ] ; then\n'
719 <        txt += '   echo "SET_EXE_ENV 10034 ==>ERROR CMSSW '+self.version+' not found on `hostname`" \n'
720 <        txt += '   echo "JOB_EXIT_STATUS = 10034"\n'
721 <        txt += '   echo "JobExitCode=10034" | tee -a $RUNTIME_AREA/$repo\n'
922 <        txt += '   dumpStatus $RUNTIME_AREA/$repo\n'
923 <        txt += '   rm -f $RUNTIME_AREA/$repo \n'
924 <        txt += '   echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
925 <        txt += '   echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
926 <        ## OLI_Daniele
927 <        txt += '    if [ $middleware == OSG ]; then \n'
928 <        txt += '        echo "Remove working directory: $WORKING_DIR"\n'
929 <        txt += '        cd $RUNTIME_AREA\n'
930 <        txt += '        /bin/rm -rf $WORKING_DIR\n'
931 <        txt += '        if [ -d $WORKING_DIR ] ;then\n'
932 <        txt += '            echo "SET_CMS_ENV 10018 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after CMSSW CMSSW_0_6_1 not found on `hostname`"\n'
933 <        txt += '            echo "JOB_EXIT_STATUS = 10018"\n'
934 <        txt += '            echo "JobExitCode=10018" | tee -a $RUNTIME_AREA/$repo\n'
935 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
936 <        txt += '            rm -f $RUNTIME_AREA/$repo \n'
937 <        txt += '            echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
938 <        txt += '            echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
939 <        txt += '        fi\n'
940 <        txt += '    fi \n'
941 <        txt += '   exit 1 \n'
719 >        txt += '    echo "ERROR ==> CMSSW '+self.version+' not found on `hostname`" \n'
720 >        txt += '    job_exit_code=10034\n'
721 >        txt += '    func_exit\n'
722          txt += 'fi \n'
943        txt += 'echo "CMSSW_VERSION =  '+self.version+'"\n'
723          txt += 'cd '+self.version+'\n'
724 <        ########## FEDE FOR DBS2 ######################
725 <        txt += 'SOFTWARE_DIR=`pwd`\n'
947 <        txt += 'echo SOFTWARE_DIR=$SOFTWARE_DIR \n'
948 <        ###############################################
949 <        ### needed grep for bug in scramv1 ###
950 <        txt += scram+' runtime -sh\n'
724 >        txt += 'SOFTWARE_DIR=`pwd`; export SOFTWARE_DIR\n'
725 >        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
726          txt += 'eval `'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME`\n'
727 <        txt += 'echo $PATH\n'
728 <
727 >        txt += 'if [ $? != 0 ] ; then\n'
728 >        txt += '    echo "ERROR ==> Problem with the command: "\n'
729 >        txt += '    echo "eval \`'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME \` at `hostname`"\n'
730 >        txt += '    job_exit_code=10034\n'
731 >        txt += '    func_exit\n'
732 >        txt += 'fi \n'
733          # Handle the arguments:
734          txt += "\n"
735 <        txt += "## number of arguments (first argument always jobnumber)\n"
735 >        txt += "## number of arguments (first argument always jobnumber, the second is the resubmission number)\n"
736          txt += "\n"
737 < #        txt += "narg=$#\n"
959 <        txt += "if [ $nargs -lt 2 ]\n"
737 >        txt += "if [ $nargs -lt "+str(self.argsList)+" ]\n"
738          txt += "then\n"
739 <        txt += "    echo 'SET_EXE_ENV 1 ==> ERROR Too few arguments' +$nargs+ \n"
740 <        txt += '    echo "JOB_EXIT_STATUS = 50113"\n'
741 <        txt += '    echo "JobExitCode=50113" | tee -a $RUNTIME_AREA/$repo\n'
964 <        txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
965 <        txt += '    rm -f $RUNTIME_AREA/$repo \n'
966 <        txt += '    echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
967 <        txt += '    echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
968 <        ## OLI_Daniele
969 <        txt += '    if [ $middleware == OSG ]; then \n'
970 <        txt += '        echo "Remove working directory: $WORKING_DIR"\n'
971 <        txt += '        cd $RUNTIME_AREA\n'
972 <        txt += '        /bin/rm -rf $WORKING_DIR\n'
973 <        txt += '        if [ -d $WORKING_DIR ] ;then\n'
974 <        txt += '            echo "SET_EXE_ENV 50114 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Too few arguments for CRAB job wrapper"\n'
975 <        txt += '            echo "JOB_EXIT_STATUS = 50114"\n'
976 <        txt += '            echo "JobExitCode=50114" | tee -a $RUNTIME_AREA/$repo\n'
977 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
978 <        txt += '            rm -f $RUNTIME_AREA/$repo \n'
979 <        txt += '            echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
980 <        txt += '            echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
981 <        txt += '        fi\n'
982 <        txt += '    fi \n'
983 <        txt += "    exit 1\n"
739 >        txt += "    echo 'ERROR ==> Too few arguments' +$nargs+ \n"
740 >        txt += '    job_exit_code=50113\n'
741 >        txt += "    func_exit\n"
742          txt += "fi\n"
743          txt += "\n"
744  
745          # Prepare job-specific part
746          job = common.job_list[nj]
747 <        ### FEDE FOR DBS OUTPUT PUBLICATION
990 <        if (self.datasetPath):
747 >        if (self.datasetPath):
748              txt += '\n'
749              txt += 'DatasetPath='+self.datasetPath+'\n'
750  
751 <            datasetpath_split = self.datasetPath.split("/")
752 <            
996 <            txt += 'PrimaryDataset='+datasetpath_split[1]+'\n'
997 <            txt += 'DataTier='+datasetpath_split[2]+'\n'
998 <            #txt += 'ProcessedDataset='+datasetpath_split[3]+'\n'
751 >            txt += 'PrimaryDataset='+self.primaryDataset +'\n'
752 >            txt += 'DataTier='+self.dataTier+'\n'
753              txt += 'ApplicationFamily=cmsRun\n'
754  
755          else:
756              txt += 'DatasetPath=MCDataTier\n'
757              txt += 'PrimaryDataset=null\n'
758              txt += 'DataTier=null\n'
1005            #txt += 'ProcessedDataset=null\n'
759              txt += 'ApplicationFamily=MCDataTier\n'
760 <        if self.pset != None: #CarlosDaniele
760 >        if self.pset != None:
761              pset = os.path.basename(job.configFilename())
762 +            pkl  = os.path.basename(job.configFilename()) + '.pkl'
763              txt += '\n'
764              txt += 'cp  $RUNTIME_AREA/'+pset+' .\n'
765 <            if (self.datasetPath): # standard job
1012 <                #txt += 'InputFiles=$2\n'
1013 <                txt += 'InputFiles=${args[1]}\n'
1014 <                txt += 'MaxEvents=${args[2]}\n'
1015 <                txt += 'SkipEvents=${args[3]}\n'
1016 <                txt += 'echo "Inputfiles:<$InputFiles>"\n'
1017 <                txt += 'sed "s#{\'INPUT\'}#$InputFiles#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
1018 <                txt += 'echo "MaxEvents:<$MaxEvents>"\n'
1019 <                txt += 'sed "s#INPUTMAXEVENTS#$MaxEvents#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
1020 <                txt += 'echo "SkipEvents:<$SkipEvents>"\n'
1021 <                txt += 'sed "s#INPUTSKIPEVENTS#$SkipEvents#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
1022 <            else:  # pythia like job
1023 <                seedIndex=1
1024 <                if (self.firstRun):
1025 <                    txt += 'FirstRun=${args['+str(seedIndex)+']}\n'
1026 <                    txt += 'echo "FirstRun: <$FirstRun>"\n'
1027 <                    txt += 'sed "s#\<INPUTFIRSTRUN\>#$FirstRun#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
1028 <                    seedIndex=seedIndex+1
1029 <
1030 <                if (self.sourceSeed):
1031 <                    txt += 'Seed=${args['+str(seedIndex)+']}\n'
1032 <                    txt += 'sed "s#\<INPUT\>#$Seed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
1033 <                    seedIndex=seedIndex+1
1034 <                    ## the following seeds are not always present
1035 <                    if (self.sourceSeedVtx):
1036 <                        txt += 'VtxSeed=${args['+str(seedIndex)+']}\n'
1037 <                        txt += 'echo "VtxSeed: <$VtxSeed>"\n'
1038 <                        txt += 'sed "s#\<INPUTVTX\>#$VtxSeed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
1039 <                        seedIndex += 1
1040 <                    if (self.sourceSeedG4):
1041 <                        txt += 'G4Seed=${args['+str(seedIndex)+']}\n'
1042 <                        txt += 'echo "G4Seed: <$G4Seed>"\n'
1043 <                        txt += 'sed "s#\<INPUTG4\>#$G4Seed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
1044 <                        seedIndex += 1
1045 <                    if (self.sourceSeedMix):
1046 <                        txt += 'mixSeed=${args['+str(seedIndex)+']}\n'
1047 <                        txt += 'echo "MixSeed: <$mixSeed>"\n'
1048 <                        txt += 'sed "s#\<INPUTMIX\>#$mixSeed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
1049 <                        seedIndex += 1
1050 <                    pass
1051 <                pass
1052 <            txt += 'mv -f '+pset+' pset.cfg\n'
1053 <
1054 <        if len(self.additional_inbox_files) > 0:
1055 <            txt += 'if [ -e $RUNTIME_AREA/'+self.additional_tgz_name+' ] ; then\n'
1056 <            txt += '  tar xzvf $RUNTIME_AREA/'+self.additional_tgz_name+'\n'
1057 <            txt += 'fi\n'
1058 <            pass
765 >            txt += 'cp  $RUNTIME_AREA/'+pkl+' .\n'
766  
767 <        if self.pset != None: #CarlosDaniele
768 <            txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
769 <        
770 <            txt += '\n'
771 <            txt += 'echo "***** cat pset.cfg *********"\n'
772 <            txt += 'cat pset.cfg\n'
773 <            txt += 'echo "****** end pset.cfg ********"\n'
774 <            txt += '\n'
775 <            ### FEDE FOR DBS OUTPUT PUBLICATION
776 <            txt += 'PSETHASH=`EdmConfigHash < pset.cfg` \n'
777 <            txt += 'echo "PSETHASH = $PSETHASH" \n'
1071 <            ##############
767 >            txt += 'PreserveSeeds='  + ','.join(self.preserveSeeds)  + '; export PreserveSeeds\n'
768 >            txt += 'IncrementSeeds=' + ','.join(self.incrementSeeds) + '; export IncrementSeeds\n'
769 >            txt += 'echo "PreserveSeeds: <$PreserveSeeds>"\n'
770 >            txt += 'echo "IncrementSeeds:<$IncrementSeeds>"\n'
771 >
772 >            txt += 'mv -f ' + pset + ' ' + psetName + '\n'
773 >            if self.var_filter:
774 >                #print "self.var_filter = ",self.var_filter
775 >                txt += "export var_filter="+"'"+self.var_filter+"'\n"
776 >                txt += 'echo $var_filter'
777 >        else:
778              txt += '\n'
779 <            # txt += 'echo "***** cat pset1.cfg *********"\n'
780 <            # txt += 'cat pset1.cfg\n'
1075 <            # txt += 'echo "****** end pset1.cfg ********"\n'
779 >            if self.AdditionalArgs: txt += 'export AdditionalArgs=\"%s\"\n'%(self.AdditionalArgs)
780 >            if int(self.NumEvents) != 0: txt += 'export MaxEvents=%s\n'%str(self.NumEvents)
781          return txt
782  
783 <    def wsBuildExe(self, nj=0):
783 >    def wsUntarSoftware(self, nj=0):
784          """
785          Put in the script the commands to build an executable
786          or a library.
787          """
788  
789 <        txt = ""
789 >        txt = '\n#Written by cms_cmssw::wsUntarSoftware\n'
790  
791          if os.path.isfile(self.tgzNameWithPath):
792 <            txt += 'echo "tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'"\n'
793 <            txt += 'tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
792 >            txt += 'echo ">>> tar xzf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+' :" \n'
793 >            if  self.debug_wrapper==1 :
794 >                txt += 'tar zxvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
795 >                txt += 'ls -Al \n'
796 >            else:
797 >                txt += 'tar zxf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
798              txt += 'untar_status=$? \n'
799              txt += 'if [ $untar_status -ne 0 ]; then \n'
800 <            txt += '   echo "SET_EXE 1 ==> ERROR Untarring .tgz file failed"\n'
801 <            txt += '   echo "JOB_EXIT_STATUS = $untar_status" \n'
802 <            txt += '   echo "JobExitCode=$untar_status" | tee -a $RUNTIME_AREA/$repo\n'
1094 <            txt += '   if [ $middleware == OSG ]; then \n'
1095 <            txt += '       echo "Remove working directory: $WORKING_DIR"\n'
1096 <            txt += '       cd $RUNTIME_AREA\n'
1097 <            txt += '       /bin/rm -rf $WORKING_DIR\n'
1098 <            txt += '       if [ -d $WORKING_DIR ] ;then\n'
1099 <            txt += '           echo "SET_EXE 50999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Untarring .tgz file failed"\n'
1100 <            txt += '           echo "JOB_EXIT_STATUS = 50999"\n'
1101 <            txt += '           echo "JobExitCode=50999" | tee -a $RUNTIME_AREA/$repo\n'
1102 <            txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
1103 <            txt += '           rm -f $RUNTIME_AREA/$repo \n'
1104 <            txt += '           echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1105 <            txt += '           echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1106 <            txt += '       fi\n'
1107 <            txt += '   fi \n'
1108 <            txt += '   \n'
1109 <            txt += '   exit 1 \n'
800 >            txt += '   echo "ERROR ==> Untarring .tgz file failed"\n'
801 >            txt += '   job_exit_code=$untar_status\n'
802 >            txt += '   func_exit\n'
803              txt += 'else \n'
804              txt += '   echo "Successful untar" \n'
805              txt += 'fi \n'
806              txt += '\n'
807 <            txt += 'echo "Include ProdAgentApi and PRODCOMMON in PYTHONPATH"\n'
807 >            txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
808              txt += 'if [ -z "$PYTHONPATH" ]; then\n'
809 <            #### FEDE FOR DBS OUTPUT PUBLICATION
1117 <            txt += '   export PYTHONPATH=$SOFTWARE_DIR/ProdAgentApi:$SOFTWARE_DIR/ProdCommon\n'
1118 <            #txt += '   export PYTHONPATH=`pwd`/ProdAgentApi:`pwd`/ProdCommon\n'
1119 <            #txt += '   export PYTHONPATH=ProdAgentApi\n'
809 >            txt += '   export PYTHONPATH=$RUNTIME_AREA/\n'
810              txt += 'else\n'
811 <            txt += '   export PYTHONPATH=$SOFTWARE_DIR/ProdAgentApi:$SOFTWARE_DIR/ProdCommon:${PYTHONPATH}\n'
1122 <            #txt += '   export PYTHONPATH=`pwd`/ProdAgentApi:`pwd`/ProdCommon:${PYTHONPATH}\n'
1123 <            #txt += '   export PYTHONPATH=ProdAgentApi:${PYTHONPATH}\n'
811 >            txt += '   export PYTHONPATH=$RUNTIME_AREA/:${PYTHONPATH}\n'
812              txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
1125            ###################  
813              txt += 'fi\n'
814              txt += '\n'
815  
816              pass
817 <        
817 >
818          return txt
819  
820 <    def modifySteeringCards(self, nj):
820 >    def wsBuildExe(self, nj=0):
821          """
822 <        modify the card provided by the user,
823 <        writing a new card into share dir
822 >        Put in the script the commands to build an executable
823 >        or a library.
824          """
825 <        
825 >
826 >        txt = '\n#Written by cms_cmssw::wsBuildExe\n'
827 >        txt += 'echo ">>> moving CMSSW software directories in `pwd`" \n'
828 >
829 >        txt += 'rm -r lib/ module/ \n'
830 >        txt += 'mv $RUNTIME_AREA/lib/ . \n'
831 >        txt += 'mv $RUNTIME_AREA/module/ . \n'
832 >        if self.dataExist == True:
833 >            txt += 'rm -r src/ \n'
834 >            txt += 'mv $RUNTIME_AREA/src/ . \n'
835 >        if len(self.additional_inbox_files)>0:
836 >            for file in self.additional_inbox_files:
837 >                txt += 'mv $RUNTIME_AREA/'+os.path.basename(file)+' . \n'
838 >
839 >        txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
840 >        txt += 'if [ -z "$PYTHONPATH" ]; then\n'
841 >        txt += '   export PYTHONPATH=$RUNTIME_AREA/\n'
842 >        txt += 'else\n'
843 >        txt += '   export PYTHONPATH=$RUNTIME_AREA/:${PYTHONPATH}\n'
844 >        txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
845 >        txt += 'fi\n'
846 >        txt += '\n'
847 >
848 >        if self.pset != None:
849 >            psetName = 'pset.py'
850 >
851 >            txt += '\n'
852 >            if self.debug_wrapper == 1:
853 >                txt += 'echo "***** cat ' + psetName + ' *********"\n'
854 >                txt += 'cat ' + psetName + '\n'
855 >                txt += 'echo "****** end ' + psetName + ' ********"\n'
856 >                txt += '\n'
857 >                txt += 'echo "***********************" \n'
858 >                txt += 'which edmConfigHash \n'
859 >                txt += 'echo "***********************" \n'
860 >            txt += 'edmConfigHash ' + psetName + ' \n'
861 >            txt += 'PSETHASH=`edmConfigHash ' + psetName + '` \n'
862 >            txt += 'echo "PSETHASH = $PSETHASH" \n'
863 >            #### FEDE temporary fix for noEdm files #####
864 >            txt += 'if [ -z "$PSETHASH" ]; then \n'
865 >            txt += '   export PSETHASH=null\n'
866 >            txt += 'fi \n'
867 >            #############################################
868 >            txt += '\n'
869 >        return txt
870 >
871 >
872      def executableName(self):
873 <        if self.scriptExe: #CarlosDaniele
873 >        if self.scriptExe:
874              return "sh "
875          else:
876              return self.executable
877  
878      def executableArgs(self):
879 <        if self.scriptExe:#CarlosDaniele
880 <            return   self.scriptExe + " $NJob"
879 >        if self.scriptExe:
880 >            return self.scriptExe + " $NJob $AdditionalArgs"
881          else:
882 <            # if >= CMSSW_1_5_X, add -e
1150 <            version_array = self.scram.getSWVersion().split('_')
1151 <            major = 0
1152 <            minor = 0
1153 <            try:
1154 <                major = int(version_array[1])
1155 <                minor = int(version_array[2])
1156 <            except:
1157 <                msg = "Cannot parse CMSSW version string: " + "_".join(version_array) + " for major and minor release number!"  
1158 <                raise CrabException(msg)
1159 <            if major >= 1 and minor >= 5 :
1160 <                return " -e -p pset.cfg"
1161 <            else:
1162 <                return " -p pset.cfg"
882 >            return " -j $RUNTIME_AREA/crab_fjr_$NJob.xml -p pset.py"
883  
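The removed branch above chose between "cmsRun -e -p pset.cfg" and "cmsRun -p pset.cfg" by parsing the release string, while the new code always passes the FJR and pset options. For reference, a sketch of that version parsing with an illustrative release name; the tuple comparison captures the ">= 1_5_X" intent more robustly than the removed "major >= 1 and minor >= 5" check.

    version = 'CMSSW_2_2_13'          # illustrative; originally taken from Scram.getSWVersion()
    parts = version.split('_')
    try:
        major, minor = int(parts[1]), int(parts[2])
    except (IndexError, ValueError):
        raise ValueError('Cannot parse CMSSW version string: ' + version)
    needs_e_flag = (major, minor) >= (1, 5)   # releases at or above 1_5_X took the extra -e option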
884      def inputSandbox(self, nj):
885          """
886          Returns a list of filenames to be put in JDL input sandbox.
887          """
888          inp_box = []
1169        # # dict added to delete duplicate from input sandbox file list
1170        # seen = {}
1171        ## code
889          if os.path.isfile(self.tgzNameWithPath):
890              inp_box.append(self.tgzNameWithPath)
891 <        if os.path.isfile(self.MLtgzfile):
892 <            inp_box.append(self.MLtgzfile)
893 <        ## config
1177 <        if not self.pset is None:
1178 <            inp_box.append(common.work_space.pathForTgz() + 'job/' + self.configFilename())
1179 <        ## additional input files
1180 <        tgz = self.additionalInputFileTgz()
1181 <        inp_box.append(tgz)
891 >        if os.path.isfile(self.argsFile):
892 >            inp_box.append(self.argsFile)
893 >        inp_box.append(common.work_space.jobDir() + self.scriptName)
894          return inp_box
895  
896      def outputSandbox(self, nj):
# Line 1189 | Line 901 | class Cmssw(JobType):
901  
902          ## User Declared output files
903          for out in (self.output_file+self.output_file_sandbox):
904 <            n_out = nj + 1
905 <            out_box.append(self.numberFile_(out,str(n_out)))
904 >            n_out = nj + 1
905 >            out_box.append(numberFile(out,str(n_out)))
906          return out_box
907  
1196    def prepareSteeringCards(self):
1197        """
1198        Make initial modifications of the user's steering card file.
1199        """
1200        return
908  
909      def wsRenameOutput(self, nj):
910          """
911          Returns part of a job script which renames the produced files.
912          """
913  
914 <        txt = '\n'
915 <        txt += '# directory content\n'
916 <        txt += 'ls \n'
917 <
918 <        txt += 'output_exit_status=0\n'
919 <        
920 <        for fileWithSuffix in (self.output_file_sandbox):
1214 <            output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
1215 <            txt += '\n'
1216 <            txt += '# check output file\n'
1217 <            txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n'
1218 <            txt += '    mv '+fileWithSuffix+' $RUNTIME_AREA\n'
1219 <            txt += '    cp $RUNTIME_AREA/'+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1220 <            txt += 'else\n'
1221 <            txt += '    exit_status=60302\n'
1222 <            txt += '    echo "ERROR: Problem with output file '+fileWithSuffix+'"\n'
1223 <            if common.scheduler.boss_scheduler_name == 'condor_g':
1224 <                txt += '    if [ $middleware == OSG ]; then \n'
1225 <                txt += '        echo "prepare dummy output file"\n'
1226 <                txt += '        echo "Processing of job output failed" > $RUNTIME_AREA/'+output_file_num+'\n'
1227 <                txt += '    fi \n'
1228 <            txt += 'fi\n'
1229 <        
914 >        txt = '\n#Written by cms_cmssw::wsRenameOutput\n'
915 >        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
916 >        txt += 'echo ">>> current directory content:"\n'
917 >        if self.debug_wrapper==1:
918 >            txt += 'ls -Al\n'
919 >        txt += '\n'
920 >
921          for fileWithSuffix in (self.output_file):
922 <            output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
922 >            output_file_num = numberFile(fileWithSuffix, '$OutUniqueID')
923              txt += '\n'
924              txt += '# check output file\n'
925              txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n'
926 <            txt += '    mv '+fileWithSuffix+' $RUNTIME_AREA\n'
927 <            txt += '    cp $RUNTIME_AREA/'+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
926 >            if (self.copy_data == 1):  # For OSG nodes, file is in $WORKING_DIR, should not be moved to $RUNTIME_AREA
927 >                txt += '    mv '+fileWithSuffix+' '+output_file_num+'\n'
928 >                txt += '    ln -s `pwd`/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
929 >            else:
930 >                txt += '    mv '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
931 >                txt += '    ln -s $RUNTIME_AREA/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
932              txt += 'else\n'
933 <            txt += '    exit_status=60302\n'
934 <            txt += '    echo "ERROR: Problem with output file '+fileWithSuffix+'"\n'
935 <            txt += '    echo "JOB_EXIT_STATUS = $exit_status"\n'
1241 <            txt += '    output_exit_status=$exit_status\n'
1242 <            if common.scheduler.boss_scheduler_name == 'condor_g':
933 >            txt += '    job_exit_code=60302\n'
934 >            txt += '    echo "WARNING: Output file '+fileWithSuffix+' not found"\n'
935 >            if common.scheduler.name().upper() == 'CONDOR_G':
936                  txt += '    if [ $middleware == OSG ]; then \n'
937                  txt += '        echo "prepare dummy output file"\n'
938                  txt += '        echo "Processing of job output failed" > $RUNTIME_AREA/'+output_file_num+'\n'
# Line 1247 | Line 940 | class Cmssw(JobType):
940              txt += 'fi\n'
941          file_list = []
942          for fileWithSuffix in (self.output_file):
943 <             file_list.append(self.numberFile_(fileWithSuffix, '$NJob'))
944 <            
945 <        txt += 'file_list="'+string.join(file_list,' ')+'"\n'
943 >             file_list.append(numberFile('$SOFTWARE_DIR/'+fileWithSuffix, '$OutUniqueID'))
944 >
945 >        txt += 'file_list="'+string.join(file_list,',')+'"\n'
946 >        txt += '\n'
947 >        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
948 >        txt += 'echo ">>> current directory content:"\n'
949 >        if self.debug_wrapper==1:
950 >            txt += 'ls -Al\n'
951 >        txt += '\n'
952          txt += 'cd $RUNTIME_AREA\n'
953 +        txt += 'echo ">>> current directory (RUNTIME_AREA):  $RUNTIME_AREA"\n'
954          return txt
955  
1256    def numberFile_(self, file, txt):
1257        """
1258        append _'txt' before last extension of a file
1259        """
1260        p = string.split(file,".")
1261        # take away last extension
1262        name = p[0]
1263        for x in p[1:-1]:
1264            name=name+"."+x
1265        # add "_txt"
1266        if len(p)>1:
1267            ext = p[len(p)-1]
1268            result = name + '_' + txt + "." + ext
1269        else:
1270            result = name + '_' + txt
1271        
1272        return result
1273
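The removed numberFile_ helper above (its role is presumably played now by the numberFile utility imported from crab_util) encodes the output-file naming convention: insert _<tag> before the last extension. A compact equivalent, modulo corner cases such as dot-files:

    import os

    def number_file(filename, tag):
        # 'histo.root' + '1'      -> 'histo_1.root'
        # 'my.out.log' + '$NJob'  -> 'my.out_$NJob.log'
        # 'README'     + '2'      -> 'README_2'
        base, ext = os.path.splitext(filename)
        if ext:
            return base + '_' + str(tag) + ext
        return filename + '_' + str(tag)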
956      def getRequirements(self, nj=[]):
957          """
958 <        return job requirements to add to jdl files
958 >        return job requirements to add to jdl files
959          """
960          req = ''
961          if self.version:
962              req='Member("VO-cms-' + \
963                   self.version + \
964                   '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
965 <        ## SL add requirement for OS version only if SL4
1284 <        #reSL4 = re.compile( r'slc4' )
1285 <        if self.executable_arch: # and reSL4.search(self.executable_arch):
965 >        if self.executable_arch:
966              req+=' && Member("VO-cms-' + \
967                   self.executable_arch + \
968                   '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
969  
970          req = req + ' && (other.GlueHostNetworkAdapterOutboundIP)'
971 +        if ( common.scheduler.name() in ["glite"] ):
972 +            ## 25-Jun-2009 SL: patch to use Cream enabled WMS
973 +            if ( self.cfg_params.get('GRID.use_cream',None) ):
974 +                req += ' && (other.GlueCEStateStatus == "Production" || other.GlueCEStateStatus == "Special")'
975 +            else:
976 +                req += ' && other.GlueCEStateStatus == "Production" '
977  
978          return req
979  
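To make the JDL expression returned by getRequirements() concrete, a small sketch that assembles the same string for illustrative version/architecture values; with GRID.use_cream set, the last clause becomes the Production/Special disjunction shown above instead.

    version = 'CMSSW_3_8_4'            # illustrative
    arch    = 'slc5_amd64_gcc434'      # illustrative

    req  = 'Member("VO-cms-%s", other.GlueHostApplicationSoftwareRunTimeEnvironment)' % version
    req += ' && Member("VO-cms-%s", other.GlueHostApplicationSoftwareRunTimeEnvironment)' % arch
    req += ' && (other.GlueHostNetworkAdapterOutboundIP)'
    req += ' && other.GlueCEStateStatus == "Production" '   # glite scheduler, no use_cream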
980      def configFilename(self):
981          """ return the config filename """
982 <        return self.name()+'.cfg'
982 >        return self.name()+'.py'
983  
1298    ### OLI_DANIELE
984      def wsSetupCMSOSGEnvironment_(self):
985          """
986          Returns part of a job script which prepares
987          the execution environment and which is common for all CMS jobs.
988          """
989 <        txt = '\n'
990 <        txt += '   echo "### SETUP CMS OSG  ENVIRONMENT ###"\n'
991 <        txt += '   if [ -f $GRID3_APP_DIR/cmssoft/cmsset_default.sh ] ;then\n'
992 <        txt += '      # Use $GRID3_APP_DIR/cmssoft/cmsset_default.sh to setup cms software\n'
993 <        txt += '       export SCRAM_ARCH='+self.executable_arch+'\n'
994 <        txt += '       source $GRID3_APP_DIR/cmssoft/cmsset_default.sh '+self.version+'\n'
1310 <        txt += '   elif [ -f $OSG_APP/cmssoft/cms/cmsset_default.sh ] ;then\n'
989 >        txt = '\n#Written by cms_cmssw::wsSetupCMSOSGEnvironment_\n'
990 >        txt += '    echo ">>> setup CMS OSG environment:"\n'
991 >        txt += '    echo "set SCRAM ARCH to ' + self.executable_arch + '"\n'
992 >        txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
993 >        txt += '    echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
994 >        txt += '    if [ -f $OSG_APP/cmssoft/cms/cmsset_default.sh ] ;then\n'
995          txt += '      # Use $OSG_APP/cmssoft/cms/cmsset_default.sh to setup cms software\n'
996 <        txt += '       export SCRAM_ARCH='+self.executable_arch+'\n'
997 <        txt += '       source $OSG_APP/cmssoft/cms/cmsset_default.sh '+self.version+'\n'
998 <        txt += '   else\n'
999 <        txt += '       echo "SET_CMS_ENV 10020 ==> ERROR $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1000 <        txt += '       echo "JOB_EXIT_STATUS = 10020"\n'
1001 <        txt += '       echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1318 <        txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
1319 <        txt += '       rm -f $RUNTIME_AREA/$repo \n'
1320 <        txt += '       echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1321 <        txt += '       echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1322 <        txt += '       exit 1\n'
1323 <        txt += '\n'
1324 <        txt += '       echo "Remove working directory: $WORKING_DIR"\n'
1325 <        txt += '       cd $RUNTIME_AREA\n'
1326 <        txt += '       /bin/rm -rf $WORKING_DIR\n'
1327 <        txt += '       if [ -d $WORKING_DIR ] ;then\n'
1328 <        txt += '           echo "SET_CMS_ENV 10017 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1329 <        txt += '           echo "JOB_EXIT_STATUS = 10017"\n'
1330 <        txt += '           echo "JobExitCode=10017" | tee -a $RUNTIME_AREA/$repo\n'
1331 <        txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
1332 <        txt += '           rm -f $RUNTIME_AREA/$repo \n'
1333 <        txt += '           echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1334 <        txt += '           echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1335 <        txt += '       fi\n'
1336 <        txt += '\n'
1337 <        txt += '       exit 1\n'
1338 <        txt += '   fi\n'
996 >        txt += '        source $OSG_APP/cmssoft/cms/cmsset_default.sh '+self.version+'\n'
997 >        txt += '    else\n'
998 >        txt += '        echo "ERROR ==> $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
999 >        txt += '        job_exit_code=10020\n'
1000 >        txt += '        func_exit\n'
1001 >        txt += '    fi\n'
1002          txt += '\n'
1003 <        txt += '   echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
1004 <        txt += '   echo " END SETUP CMS OSG  ENVIRONMENT "\n'
1003 >        txt += '    echo "==> setup cms environment ok"\n'
1004 >        txt += '    echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
1005  
1006          return txt
1007 <
1345 <    ### OLI_DANIELE
1007 >
1008      def wsSetupCMSLCGEnvironment_(self):
1009          """
1010          Returns part of a job script which prepares
1011          the execution environment and which is common for all CMS jobs.
1012          """
1013 <        txt  = '   \n'
1014 <        txt += '   echo " ### SETUP CMS LCG  ENVIRONMENT ### "\n'
1015 <        txt += '   if [ ! $VO_CMS_SW_DIR ] ;then\n'
1016 <        txt += '       echo "SET_CMS_ENV 10031 ==> ERROR CMS software dir not found on WN `hostname`"\n'
1017 <        txt += '       echo "JOB_EXIT_STATUS = 10031" \n'
1018 <        txt += '       echo "JobExitCode=10031" | tee -a $RUNTIME_AREA/$repo\n'
1019 <        txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
1020 <        txt += '       rm -f $RUNTIME_AREA/$repo \n'
1021 <        txt += '       echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1022 <        txt += '       echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1023 <        txt += '       exit 1\n'
1024 <        txt += '   else\n'
1025 <        txt += '       echo "Sourcing environment... "\n'
1026 <        txt += '       if [ ! -s $VO_CMS_SW_DIR/cmsset_default.sh ] ;then\n'
1027 <        txt += '           echo "SET_CMS_ENV 10020 ==> ERROR cmsset_default.sh file not found into dir $VO_CMS_SW_DIR"\n'
1028 <        txt += '           echo "JOB_EXIT_STATUS = 10020"\n'
1029 <        txt += '           echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1030 <        txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
1031 <        txt += '           rm -f $RUNTIME_AREA/$repo \n'
1032 <        txt += '           echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1033 <        txt += '           echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1034 <        txt += '           exit 1\n'
1035 <        txt += '       fi\n'
1036 <        txt += '       echo "sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1037 <        txt += '       source $VO_CMS_SW_DIR/cmsset_default.sh\n'
1038 <        txt += '       result=$?\n'
1039 <        txt += '       if [ $result -ne 0 ]; then\n'
1378 <        txt += '           echo "SET_CMS_ENV 10032 ==> ERROR problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1379 <        txt += '           echo "JOB_EXIT_STATUS = 10032"\n'
1380 <        txt += '           echo "JobExitCode=10032" | tee -a $RUNTIME_AREA/$repo\n'
1381 <        txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
1382 <        txt += '           rm -f $RUNTIME_AREA/$repo \n'
1383 <        txt += '           echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1384 <        txt += '           echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1385 <        txt += '           exit 1\n'
1386 <        txt += '       fi\n'
1387 <        txt += '   fi\n'
1388 <        txt += '   \n'
1389 <        txt += '   echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
1390 <        txt += '   echo "### END SETUP CMS LCG ENVIRONMENT ###"\n'
1013 >        txt = '\n#Written by cms_cmssw::wsSetupCMSLCGEnvironment_\n'
1014 >        txt += '    echo ">>> setup CMS LCG environment:"\n'
1015 >        txt += '    echo "set SCRAM ARCH and BUILD_ARCH to ' + self.executable_arch + '"\n'
1016 >        txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
1017 >        txt += '    export BUILD_ARCH='+self.executable_arch+'\n'
1018 >        txt += '    if [ ! $VO_CMS_SW_DIR ] ;then\n'
1019 >        txt += '        echo "ERROR ==> CMS software dir not found on WN `hostname`"\n'
1020 >        txt += '        job_exit_code=10031\n'
1021 >        txt += '        func_exit\n'
1022 >        txt += '    else\n'
1023 >        txt += '        echo "Sourcing environment... "\n'
1024 >        txt += '        if [ ! -s $VO_CMS_SW_DIR/cmsset_default.sh ] ;then\n'
1025 >        txt += '            echo "ERROR ==> cmsset_default.sh file not found into dir $VO_CMS_SW_DIR"\n'
1026 >        txt += '            job_exit_code=10020\n'
1027 >        txt += '            func_exit\n'
1028 >        txt += '        fi\n'
1029 >        txt += '        echo "sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1030 >        txt += '        source $VO_CMS_SW_DIR/cmsset_default.sh\n'
1031 >        txt += '        result=$?\n'
1032 >        txt += '        if [ $result -ne 0 ]; then\n'
1033 >        txt += '            echo "ERROR ==> problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1034 >        txt += '            job_exit_code=10032\n'
1035 >        txt += '            func_exit\n'
1036 >        txt += '        fi\n'
1037 >        txt += '    fi\n'
1038 >        txt += '    \n'
1039 >        txt += '    echo "==> setup cms environment ok"\n'
1040          return txt
1041  
1042 <    ### FEDE FOR DBS OUTPUT PUBLICATION
1394 <    def modifyReport(self, nj):
1042 >    def wsModifyReport(self, nj):
1043          """
1044 <        insert the part of the script that modifies the FrameworkJob Report
1044 >        insert the part of the script that modifies the FrameworkJob Report
1045          """
1046  
1047 <        txt = ''
1048 <        try:
1049 <            publish_data = int(self.cfg_params['USER.publish_data'])          
1050 <        except KeyError:
1051 <            publish_data = 0
1052 <        if (publish_data == 1):  
1405 <            txt += 'echo "Modify Job Report" \n'
1406 <            #txt += 'chmod a+x $RUNTIME_AREA/'+self.version+'/ProdAgentApi/FwkJobRep/ModifyJobReport.py\n'
1407 <            ################ FEDE FOR DBS2 #############################################
1408 <            txt += 'chmod a+x $SOFTWARE_DIR/ProdAgentApi/FwkJobRep/ModifyJobReport.py\n'
1409 <            #############################################################################
1410 <            #try:
1411 <            #    publish_data = int(self.cfg_params['USER.publish_data'])          
1412 <            #except KeyError:
1413 <            #    publish_data = 0
1414 <
1415 <            txt += 'if [ -z "$SE" ]; then\n'
1416 <            txt += '    SE="" \n'
1417 <            txt += 'fi \n'
1418 <            txt += 'if [ -z "$SE_PATH" ]; then\n'
1419 <            txt += '    SE_PATH="" \n'
1420 <            txt += 'fi \n'
1421 <            txt += 'echo "SE = $SE"\n'
1422 <            txt += 'echo "SE_PATH = $SE_PATH"\n'
1423 <
1424 <        #if (publish_data == 1):  
1425 <            #processedDataset = self.cfg_params['USER.processed_datasetname']
1426 <            processedDataset = self.cfg_params['USER.publish_data_name']
1427 <            txt += 'ProcessedDataset='+processedDataset+'\n'
1428 <            #### LFN=/store/user/<user>/processedDataset_PSETHASH
1429 <            txt += 'if [ "$SE_PATH" == "" ]; then\n'
1430 <            #### FEDE: added slash in LFN ##############
1431 <            txt += '    FOR_LFN=/copy_problems/ \n'
1432 <            txt += 'else \n'
1433 <            txt += '    tmp=`echo $SE_PATH | awk -F \'store\' \'{print$2}\'` \n'
1434 <            #####  FEDE TO BE CHANGED, BECAUSE STORE IS HARDCODED!!!! ########
1435 <            txt += '    FOR_LFN=/store$tmp \n'
1436 <            txt += 'fi \n'
1437 <            txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1438 <            txt += 'echo "FOR_LFN = $FOR_LFN" \n'
1047 >        txt = ''
1048 >        if (self.copy_data == 1):
1049 >            txt = '\n#Written by cms_cmssw::wsModifyReport\n'
1050 >
1051 >            txt += 'echo ">>> Modify Job Report:" \n'
1052 >            txt += 'chmod a+x $RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py\n'
1053              txt += 'echo "CMSSW_VERSION = $CMSSW_VERSION"\n\n'
1054 <            #txt += 'echo "$RUNTIME_AREA/'+self.version+'/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
1055 <            txt += 'echo "$SOFTWARE_DIR/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
1056 <            txt += '$SOFTWARE_DIR/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
1057 <            #txt += '$RUNTIME_AREA/'+self.version+'/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
1058 <      
1054 >
1055 >            args = 'fjr $RUNTIME_AREA/crab_fjr_$NJob.xml json $RUNTIME_AREA/resultCopyFile n_job $OutUniqueID PrimaryDataset $PrimaryDataset  ApplicationFamily $ApplicationFamily ApplicationName $executable cmssw_version $CMSSW_VERSION psethash $PSETHASH'
1056 >
1057 >            if (self.publish_data == 1):
1058 >                txt += 'ProcessedDataset='+self.processedDataset+'\n'
1059 >                txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1060 >                args += ' UserProcessedDataset $USER-$ProcessedDataset-$PSETHASH'
1061 >
1062 >            txt += 'echo "$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py '+str(args)+'"\n'
1063 >            txt += '$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py '+str(args)+'\n'
1064              txt += 'modifyReport_result=$?\n'
1446            txt += 'echo modifyReport_result = $modifyReport_result\n'
1065              txt += 'if [ $modifyReport_result -ne 0 ]; then\n'
1066 <            txt += '    exit_status=1\n'
1067 <            txt += '    echo "ERROR: Problem with ModifyJobReport"\n'
1066 >            txt += '    modifyReport_result=70500\n'
1067 >            txt += '    job_exit_code=$modifyReport_result\n'
1068 >            txt += '    echo "ModifyReportResult=$modifyReport_result" | tee -a $RUNTIME_AREA/$repo\n'
1069 >            txt += '    echo "WARNING: Problem with ModifyJobReport"\n'
1070              txt += 'else\n'
1071 <            txt += '    mv NewFrameworkJobReport.xml crab_fjr_$NJob.xml\n'
1071 >            txt += '    mv NewFrameworkJobReport.xml $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
1072              txt += 'fi\n'
1453        else:
1454            txt += 'echo "no data publication required"\n'
1455            #txt += 'ProcessedDataset=no_data_to_publish \n'
1456            #### FEDE: added slash in LFN ##############
1457            #txt += 'FOR_LFN=/local/ \n'
1458            #txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1459            #txt += 'echo "FOR_LFN = $FOR_LFN" \n'
1073          return txt
1074  
1075 <    def cleanEnv(self):
1076 <        ### OLI_DANIELE
1077 <        txt = ''
1078 <        txt += 'if [ $middleware == OSG ]; then\n'  
1079 <        txt += '    cd $RUNTIME_AREA\n'
1080 <        txt += '    echo "Remove working directory: $WORKING_DIR"\n'
1081 <        txt += '    /bin/rm -rf $WORKING_DIR\n'
1082 <        txt += '    if [ -d $WORKING_DIR ] ;then\n'
1083 <        txt += '              echo "SET_EXE 60999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after cleanup of WN"\n'
1084 <        txt += '              echo "JOB_EXIT_STATUS = 60999"\n'
1085 <        txt += '              echo "JobExitCode=60999" | tee -a $RUNTIME_AREA/$repo\n'
1086 <        txt += '              dumpStatus $RUNTIME_AREA/$repo\n'
1087 <        txt += '        rm -f $RUNTIME_AREA/$repo \n'
1088 <        txt += '        echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1089 <        txt += '        echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1075 >    def wsParseFJR(self):
1076 >        """
1077 >        Parse the FrameworkJobReport to obtain useful info
1078 >        """
1079 >        txt = '\n#Written by cms_cmssw::wsParseFJR\n'
1080 >        txt += 'echo ">>> Parse FrameworkJobReport crab_fjr.xml"\n'
1081 >        txt += 'if [ -s $RUNTIME_AREA/crab_fjr_$NJob.xml ]; then\n'
1082 >        txt += '    if [ -s $RUNTIME_AREA/parseCrabFjr.py ]; then\n'
1083 >        txt += '        cmd_out=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --dashboard $MonitorID,$MonitorJobID '+self.debugWrap+'`\n'
1084 >        if self.debug_wrapper==1 :
1085 >            txt += '        echo "Result of parsing the FrameworkJobReport crab_fjr.xml: $cmd_out"\n'
1086 >        txt += '        executable_exit_status=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --exitcode`\n'
1087 >        txt += '        if [ $executable_exit_status -eq 50115 ];then\n'
1088 >        txt += '            echo ">>> crab_fjr.xml contents: "\n'
1089 >        txt += '            cat $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
1090 >        txt += '            echo "Wrong FrameworkJobReport --> does not contain useful info. ExitStatus: $executable_exit_status"\n'
1091 >        txt += '        elif [ $executable_exit_status -eq -999 ];then\n'
1092 >        txt += '            echo "ExitStatus from FrameworkJobReport not available. Using exit code of executable from command line."\n'
1093 >        txt += '        else\n'
1094 >        txt += '            echo "Extracted ExitStatus from FrameworkJobReport parsing output: $executable_exit_status"\n'
1095 >        txt += '        fi\n'
1096 >        txt += '    else\n'
1097 >        txt += '        echo "CRAB python script to parse CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1098          txt += '    fi\n'
1099 +          #### Patch to check input data reading for CMSSW16x. Hopefully we'll remove it asap.
1100 +        txt += '    if [ $executable_exit_status -eq 0 ];then\n'
1101 +        txt += '        echo ">>> Executable succeeded  $executable_exit_status"\n'
1102 +        txt += '    fi\n'
1103 +        txt += 'else\n'
1104 +        txt += '    echo "CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1105          txt += 'fi\n'
1106          txt += '\n'
1107 +        txt += 'if [ $executable_exit_status -ne 0 ];then\n'
1108 +        txt += '    echo ">>> Executable failed  $executable_exit_status"\n'
1109 +        txt += '    echo "ExeExitCode=$executable_exit_status" | tee -a $RUNTIME_AREA/$repo\n'
1110 +        txt += '    echo "EXECUTABLE_EXIT_STATUS = $executable_exit_status"\n'
1111 +        txt += '    job_exit_code=$executable_exit_status\n'
1112 +        txt += '    func_exit\n'
1113 +        txt += 'fi\n\n'
1114 +        txt += 'echo "ExeExitCode=$executable_exit_status" | tee -a $RUNTIME_AREA/$repo\n'
1115 +        txt += 'echo "EXECUTABLE_EXIT_STATUS = $executable_exit_status"\n'
1116 +        txt += 'job_exit_code=$executable_exit_status\n'
1117 +
1118          return txt
1119  
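(Editor's note, not part of the diff: wsParseFJR emits shell code that asks parseCrabFjr.py for the executable exit status recorded in crab_fjr_$NJob.xml, falling back to the command-line exit code when the report or the parser script is missing, and treating 50115 and -999 as special cases. As an illustration only of pulling an exit code out of an FJR-like XML file with pulldom (already imported by this module); this is not the actual parseCrabFjr.py, and the <ExitCode Value="..."/> layout is an assumption:

    from xml.dom import pulldom

    def extract_exit_code(fjr_path, default=-999):
        # Illustration: assumes exit codes appear as <ExitCode Value="..."/>
        # elements; the real FrameworkJobReport schema may differ.
        try:
            for event, node in pulldom.parse(fjr_path):
                if event == pulldom.START_ELEMENT and node.tagName == 'ExitCode':
                    return int(node.attributes.getNamedItem('Value').nodeValue)
        except Exception:
            pass
        return default  # mirrors the -999 "not available" branch above
)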
1120      def setParam_(self, param, value):
# Line 1485 | Line 1123 | class Cmssw(JobType):
1123      def getParams(self):
1124          return self._params
1125  
1126 <    def setTaskid_(self):
1489 <        self._taskId = self.cfg_params['taskId']
1490 <        
1491 <    def getTaskid(self):
1492 <        return self._taskId
1493 <
1494 <    def uniquelist(self, old):
1126 >    def outList(self,list=False):
1127          """
1128 <        remove duplicates from a list
1128 >        build the list of output files expected in the output sandbox
1129          """
1130 <        nd={}
1131 <        for e in old:
1132 <            nd[e]=0
1133 <        return nd.keys()
1130 >        txt = ''
1131 >        txt += 'echo ">>> list of expected files on output sandbox"\n'
1132 >        listOutFiles = []
1133 >        stdout = 'CMSSW_$NJob.stdout'
1134 >        stderr = 'CMSSW_$NJob.stderr'
1135 >        if len(self.output_file) <= 0:
1136 >            msg ="WARNING: no output files name have been defined!!\n"
1137 >            msg+="\tno output files will be reported back/staged\n"
1138 >            common.logger.info(msg)
1139 >
1140 >        if (self.return_data == 1):
1141 >            for file in (self.output_file):
1142 >                listOutFiles.append(numberFile(file, '$OutUniqueID'))
1143 >        for file in (self.output_file_sandbox):
1144 >            listOutFiles.append(numberFile(file, '$NJob'))
1145 >        listOutFiles.append(stdout)
1146 >        listOutFiles.append(stderr)
1147 >
1148 >        txt += 'echo "output files: '+string.join(listOutFiles,' ')+'"\n'
1149 >        txt += 'filesToCheck="'+string.join(listOutFiles,' ')+'"\n'
1150 >        txt += 'export filesToCheck\n'
1151 >        taskinfo={}
1152 >        taskinfo['outfileBasename'] = self.output_file
1153 >        common._db.updateTask_(taskinfo)
1154  
1155 +        if list : return self.output_file
1156 +        return txt
1157  
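(Editor's note, not part of the diff: outList tags each user output file with the job identifier via numberFile from crab_util, then exports the whole set as filesToCheck for the wrapper. A hypothetical stand-in with the commonly assumed behaviour, inserting the tag before the file extension; the real helper may differ, and the file names below are examples only:

    import os

    def number_file(filename, txt):
        # Hypothetical stand-in for crab_util.numberFile:
        # 'histo.root' -> 'histo_$OutUniqueID.root'
        base, ext = os.path.splitext(filename)
        return '%s_%s%s' % (base, txt, ext)

    out_files = [number_file(f, '$OutUniqueID') for f in ['histo.root']]
    out_files += ['CMSSW_$NJob.stdout', 'CMSSW_$NJob.stderr']
    print('filesToCheck="%s"' % ' '.join(out_files))
)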
1158 <    def checkOut(self, limit):
1158 >    def checkCMSSWVersion(self, url = "https://cmstags.cern.ch/cgi-bin/CmsTC/", fileName = "ReleasesXML"):
1159          """
1160 <        check the dimension of the output files
1507 <        """
1508 <        txt = 'echo "*****************************************"\n'
1509 <        txt += 'echo "** Starting output sandbox limit check **"\n'
1510 <        txt += 'echo "*****************************************"\n'
1511 <        allOutFiles = ""
1512 <        listOutFiles = []
1513 <        for fileOut in (self.output_file+self.output_file_sandbox):
1514 <             if fileOut.find('crab_fjr') == -1:
1515 <                 allOutFiles = allOutFiles + " " + self.numberFile_(fileOut, '$NJob')
1516 <                 listOutFiles.append(self.numberFile_(fileOut, '$NJob'))
1517 <        txt += 'echo "OUTPUT files: '+str(allOutFiles)+'";\n'
1518 <        txt += 'ls -gGhrta;\n'
1519 <        txt += 'sum=0;\n'
1520 <        txt += 'for file in '+str(allOutFiles)+' ; do\n'
1521 <        txt += '    if [ -e $file ]; then\n'
1522 <        txt += '        tt=`ls -gGrta $file | awk \'{ print $3 }\'`\n'
1523 <        txt += '        sum=`expr $sum + $tt`\n'
1524 <        txt += '    else\n'
1525 <        txt += '        echo "WARNING: output file $file not found!"\n'
1526 <        txt += '    fi\n'
1527 <        txt += 'done\n'
1528 <        txt += 'echo "Total Output dimension: $sum";\n'
1529 <        txt += 'limit='+str(limit)+';\n'
1530 <        txt += 'echo "OUTPUT FILES LIMIT SET TO: $limit";\n'
1531 <        txt += 'if [ $limit -lt $sum ]; then\n'
1532 <        txt += '    echo "WARNING: output files have to big size - something will be lost;"\n'
1533 <        txt += '    echo "         checking the output file sizes..."\n'
1534 <        """
1535 <        txt += '    dim=0;\n'
1536 <        txt += '    exclude=0;\n'
1537 <        txt += '    for files in '+str(allOutFiles)+' ; do\n'
1538 <        txt += '        sumTemp=0;\n'
1539 <        txt += '        for file2 in '+str(allOutFiles)+' ; do\n'
1540 <        txt += '            if [ $file != $file2 ]; then\n'
1541 <        txt += '                tt=`ls -gGrta $file2 | awk \'{ print $3 }\';`\n'
1542 <        txt += '                sumTemp=`expr $sumTemp + $tt`;\n'
1543 <        txt += '            fi\n'
1544 <        txt += '        done\n'
1545 <        txt += '        if [ $sumTemp -lt $limit ]; then\n'
1546 <        txt += '            if [ $dim -lt $sumTemp ]; then\n'
1547 <        txt += '                dim=$sumTemp;\n'
1548 <        txt += '                exclude=$file;\n'
1549 <        txt += '            fi\n'
1550 <        txt += '        fi\n'
1551 <        txt += '    done\n'
1552 <        txt += '    echo "Dimension calculated: $dim"; echo "File to exclude: $exclude";\n'
1160 >        compare current CMSSW release and arch with allowed releases
1161          """
1162 <        txt += '    tot=0;\n'
1163 <        txt += '    for file2 in '+str(allOutFiles)+' ; do\n'
1164 <        txt += '        tt=`ls -gGrta $file2 | awk \'{ print $3 }\';`\n'
1165 <        txt += '        tot=`expr $tot + $tt`;\n'
1166 <        txt += '        if [ $limit -lt $tot ]; then\n'
1167 <        txt += '            tot=`expr $tot - $tt`;\n'
1168 <        txt += '            fileLast=$file;\n'
1169 <        txt += '            break;\n'
1170 <        txt += '        fi\n'
1171 <        txt += '    done\n'
1172 <        txt += '    echo "Dimension calculated: $tot"; echo "First file to exclude: $file";\n'
1173 <        txt += '    flag=0;\n'    
1174 <        txt += '    for filess in '+str(allOutFiles)+' ; do\n'
1175 <        txt += '        if [ $fileLast = $filess ]; then\n'
1176 <        txt += '            flag=1;\n'
1177 <        txt += '        fi\n'
1178 <        txt += '        if [ $flag -eq 1 ]; then\n'
1179 <        txt += '            rm -f $filess;\n'
1180 <        txt += '        fi\n'
1181 <        txt += '    done\n'
1182 <        txt += '    ls -agGhrt;\n'
1183 <        txt += '    echo "WARNING: output files are too big in dimension: can not put in the output_sandbox.";\n'
1184 <        txt += '    echo "JOB_EXIT_STATUS = 70000";\n'
1185 <        txt += '    exit_status=70000;\n'
1186 <        txt += 'else'
1187 <        txt += '    echo "Total Output dimension $sum is fine.";\n'
1188 <        txt += 'fi\n'
1189 <        txt += 'echo "*****************************************"\n'
1190 <        txt += 'echo "*** Ending output sandbox limit check ***"\n'
1191 <        txt += 'echo "*****************************************"\n'
1192 <        return txt
1162 >
1163 >        downloader = Downloader(url)
1164 >        goodRelease = False
1165 >
1166 >        try:
1167 >            result = downloader.config(fileName)
1168 >        except:
1169 >            common.logger.info("ERROR: Problem reading file of allowed CMSSW releases.")
1170 >
1171 >        try:
1172 >            events = pulldom.parseString(result)
1173 >
1174 >            arch     = None
1175 >            release  = None
1176 >            relType  = None
1177 >            relState = None
1178 >            for (event, node) in events:
1179 >                if event == pulldom.START_ELEMENT:
1180 >                    if node.tagName == 'architecture':
1181 >                        arch = node.attributes.getNamedItem('name').nodeValue
1182 >                    if node.tagName == 'project':
1183 >                        relType = node.attributes.getNamedItem('type').nodeValue
1184 >                        relState = node.attributes.getNamedItem('state').nodeValue
1185 >                        if relType == 'Production' and relState == 'Announced':
1186 >                            release = node.attributes.getNamedItem('label').nodeValue
1187 >                if self.executable_arch == arch and self.version == release:
1188 >                    goodRelease = True
1189 >                    return goodRelease
1190 >
1191 >            if not goodRelease:
1192 >                msg = "WARNING: %s on %s is not a supported release. " % \
1193 >                        (self.version, self.executable_arch)
1194 >                msg += "Submission may fail."
1195 >                common.logger.info(msg)
1196 >        except:
1197 >            common.logger.info("Problems parsing file of allowed CMSSW releases.")
1198 >
1199 >        return goodRelease
1200 >
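(Editor's note, not part of the diff: as written above, if downloader.config() raises, result is never assigned, so pulldom.parseString(result) fails with a NameError that the second bare except silently logs as a parsing problem. A slightly more defensive sketch of the same check, using the same ReleasesXML layout the code assumes, i.e. <architecture name="..."> wrapping <project label/type/state .../> nodes; function and variable names are hypothetical:

    from xml.dom import pulldom

    def is_supported_release(xml_text, my_arch, my_release):
        # Return True if (my_release, my_arch) is listed as an Announced
        # Production release; assumes the attributes are always present,
        # as checkCMSSWVersion does.
        if not xml_text:   # e.g. the download failed: nothing to parse
            return False
        arch = None
        for event, node in pulldom.parseString(xml_text):
            if event != pulldom.START_ELEMENT:
                continue
            if node.tagName == 'architecture':
                arch = node.attributes.getNamedItem('name').nodeValue
            elif node.tagName == 'project':
                attrs = node.attributes
                if (attrs.getNamedItem('type').nodeValue == 'Production'
                    and attrs.getNamedItem('state').nodeValue == 'Announced'
                    and attrs.getNamedItem('label').nodeValue == my_release
                    and arch == my_arch):
                    return True
        return False
)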

Diff Legend

Removed lines
+ Added lines
< Changed lines
> Changed lines