
Comparing COMP/CRAB/python/cms_cmssw.py (file contents):
Revision 1.128 by fanzago, Thu Oct 11 16:23:44 2007 UTC vs.
Revision 1.300 by spiga, Wed May 20 13:51:45 2009 UTC

# Line 2 | Line 2 | from JobType import JobType
2   from crab_logger import Logger
3   from crab_exceptions import *
4   from crab_util import *
5 from BlackWhiteListParser import BlackWhiteListParser
5   import common
6   import Scram
7 + from Splitter import JobSplitter
8  
9 + from IMProv.IMProvNode import IMProvNode
10   import os, string, glob
11  
12   class Cmssw(JobType):
13 <    def __init__(self, cfg_params, ncjobs):
13 >    def __init__(self, cfg_params, ncjobs,skip_blocks, isNew):
14          JobType.__init__(self, 'CMSSW')
15          common.logger.debug(3,'CMSSW::__init__')
16 +        self.skip_blocks = skip_blocks
17 +        self.argsList = 1
18  
19          self._params = {}
20          self.cfg_params = cfg_params
21  
22 <        # init BlackWhiteListParser
23 <        self.blackWhiteListParser = BlackWhiteListParser(cfg_params)
24 <
25 <        try:
26 <            self.MaxTarBallSize = float(self.cfg_params['EDG.maxtarballsize'])
27 <        except KeyError:
25 <            self.MaxTarBallSize = 9.5
22 >        ### Temporary patch to automatically skip the ISB size check:
23 >        server=self.cfg_params.get('CRAB.server_name',None)
24 >        size = 9.5
25 >        if server or common.scheduler.name().upper() in ['LSF','CAF']: size = 99999
26 >        ### D.S.
27 >        self.MaxTarBallSize = float(self.cfg_params.get('EDG.maxtarballsize',size))
28  
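
The new revision reads optional configuration keys with cfg_params.get() and a default value instead of the old try/except KeyError blocks, and relaxes the input-sandbox limit when a CRAB server or the LSF/CAF scheduler is used. A minimal, self-contained sketch of that pattern (hypothetical config values, not CRAB code):

    cfg_params = {'CRAB.server_name': 'myserver'}         # hypothetical config dictionary
    size = 9.5
    if cfg_params.get('CRAB.server_name', None):          # server mode: skip the strict ISB limit
        size = 99999
    MaxTarBallSize = float(cfg_params.get('EDG.maxtarballsize', size))
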
29          # number of jobs requested to be created, limit obj splitting
30          self.ncjobs = ncjobs
31  
32          log = common.logger
33 <        
33 >
34          self.scram = Scram.Scram(cfg_params)
35          self.additional_inbox_files = []
36          self.scriptExe = ''
37          self.executable = ''
38          self.executable_arch = self.scram.getArch()
39 <        self.tgz_name = 'default.tgz'
40 <        self.additional_tgz_name = 'additional.tgz'
39 >        self.tgz_name = 'default.tar.gz'
40 >        self.tar_name = 'default.tar'
41          self.scriptName = 'CMSSW.sh'
42 <        self.pset = ''      #scrip use case Da  
43 <        self.datasetPath = '' #scrip use case Da
42 >        self.pset = ''
43 >        self.datasetPath = ''
44  
45 +        self.tgzNameWithPath = common.work_space.pathForTgz()+self.tgz_name
46          # set FJR file name
47          self.fjrFileName = 'crab_fjr.xml'
48  
49          self.version = self.scram.getSWVersion()
50 <        
51 <        #
52 <        # Try to block creation in case of arch/version mismatch
53 <        #
54 <
52 <        a = string.split(self.version, "_")
53 <
54 <        if int(a[1]) == 1 and (int(a[2]) < 5 and self.executable_arch.find('slc4') == 0):
55 <            msg = "Error: CMS does not support %s with %s architecture"%(self.version, self.executable_arch)
50 >        common.logger.write("CMSSW version is: "+str(self.version))
51 >        try:
52 >            type, self.CMSSW_major, self.CMSSW_minor, self.CMSSW_patch = tuple(self.version.split('_'))
53 >        except:
54 >            msg = "Cannot parse CMSSW version string: " + self.version + " for major and minor release number!"
55              raise CrabException(msg)
56 <        if int(a[1]) == 1 and (int(a[2]) >= 5 and self.executable_arch.find('slc3') == 0):
57 <            msg = "Error: CMS does not support %s with %s architecture"%(self.version, self.executable_arch)
56 >
57 >        if self.CMSSW_major < 1 or (self.CMSSW_major == 1 and self.CMSSW_minor < 5):
58 >            msg = "CRAB supports CMSSW >= 1_5_x only. Use an older CRAB version."
59              raise CrabException(msg)
60 <        
61 <        common.taskDB.setDict('codeVersion',self.version)
62 <        self.setParam_('application', self.version)
60 >            """
61 >            As CMSSW versions are dropped we can drop more code:
62 >            1.X dropped: drop support for running .cfg on WN
63 >            2.0 dropped: drop all support for cfg here and in writeCfg
64 >            2.0 dropped: Recheck the random number seed support
65 >            """
66  
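
The version check above splits self.version on underscores and keeps the pieces as strings. A minimal sketch of the same test with an explicit integer conversion, so the major/minor comparison is numeric (illustration only, with a hypothetical version value):

    version = "CMSSW_2_2_13"                      # hypothetical self.version
    prefix, major, minor, patch = version.split('_')
    major, minor = int(major), int(minor)         # compare numbers, not strings
    if major < 1 or (major == 1 and minor < 5):
        raise RuntimeError("CRAB supports CMSSW >= 1_5_x only. Use an older CRAB version.")
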
67          ### collect Data cards
68  
66        ## get DBS mode
67        try:
68            self.use_dbs_1 = int(self.cfg_params['CMSSW.use_dbs_1'])
69        except KeyError:
70            self.use_dbs_1 = 0
71            
72        try:
73            tmp =  cfg_params['CMSSW.datasetpath']
74            log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
75            if string.lower(tmp)=='none':
76                self.datasetPath = None
77                self.selectNoInput = 1
78            else:
79                self.datasetPath = tmp
80                self.selectNoInput = 0
81        except KeyError:
82            msg = "Error: datasetpath not defined "  
83            raise CrabException(msg)
69  
70 <        # ML monitoring
71 <        # split dataset path style: /PreProdR3Minbias/SIM/GEN-SIM
72 <        if not self.datasetPath:
73 <            self.setParam_('dataset', 'None')
74 <            self.setParam_('owner', 'None')
70 >        ### Temporary: added to remove input file control in the case of PU
71 >        self.dataset_pu = cfg_params.get('CMSSW.dataset_pu', None)
72 >
73 >        tmp =  cfg_params['CMSSW.datasetpath']
74 >        log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
75 >
76 >        if tmp =='':
77 >            msg = "Error: datasetpath not defined "
78 >            raise CrabException(msg)
79 >        elif string.lower(tmp)=='none':
80 >            self.datasetPath = None
81 >            self.selectNoInput = 1
82          else:
83 <            try:
84 <                datasetpath_split = self.datasetPath.split("/")
93 <                # standard style
94 <                self.setParam_('datasetFull', self.datasetPath)
95 <                if self.use_dbs_1 == 1 :
96 <                    self.setParam_('dataset', datasetpath_split[1])
97 <                    self.setParam_('owner', datasetpath_split[-1])
98 <                else:
99 <                    self.setParam_('dataset', datasetpath_split[1])
100 <                    self.setParam_('owner', datasetpath_split[2])
101 <            except:
102 <                self.setParam_('dataset', self.datasetPath)
103 <                self.setParam_('owner', self.datasetPath)
104 <                
105 <        self.setTaskid_()
106 <        self.setParam_('taskId', self.cfg_params['taskId'])
83 >            self.datasetPath = tmp
84 >            self.selectNoInput = 0
85  
86          self.dataTiers = []
87  
88 +        self.debugWrap=''
89 +        self.debug_wrapper = int(cfg_params.get('USER.debug_wrapper',0))
90 +        if self.debug_wrapper == 1: self.debugWrap='--debug'
91 +
92          ## now the application
93 <        try:
94 <            self.executable = cfg_params['CMSSW.executable']
95 <            self.setParam_('exe', self.executable)
96 <            log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
115 <            msg = "Default executable cmsRun overridden. Switch to " + self.executable
116 <            log.debug(3,msg)
117 <        except KeyError:
118 <            self.executable = 'cmsRun'
119 <            self.setParam_('exe', self.executable)
120 <            msg = "User executable not defined. Use cmsRun"
121 <            log.debug(3,msg)
122 <            pass
93 >        self.managedGenerators = ['madgraph','comphep']
94 >        self.generator = cfg_params.get('CMSSW.generator','pythia').lower()
95 >        self.executable = cfg_params.get('CMSSW.executable','cmsRun')
96 >        log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
97  
98 <        try:
125 <            self.pset = cfg_params['CMSSW.pset']
126 <            log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
127 <            if self.pset.lower() != 'none' :
128 <                if (not os.path.exists(self.pset)):
129 <                    raise CrabException("User defined PSet file "+self.pset+" does not exist")
130 <            else:
131 <                self.pset = None
132 <        except KeyError:
98 >        if not cfg_params.has_key('CMSSW.pset'):
99              raise CrabException("PSet file missing. Cannot run cmsRun ")
100 +        self.pset = cfg_params['CMSSW.pset']
101 +        log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
102 +        if self.pset.lower() != 'none' :
103 +            if (not os.path.exists(self.pset)):
104 +                raise CrabException("User defined PSet file "+self.pset+" does not exist")
105 +        else:
106 +            self.pset = None
107  
108          # output files
109          ## stuff which must be returned always via sandbox
# Line 140 | Line 113 | class Cmssw(JobType):
113          self.output_file_sandbox.append(self.fjrFileName)
114  
115          # other output files to be returned via sandbox or copied to SE
116 <        try:
117 <            self.output_file = []
118 <            tmp = cfg_params['CMSSW.output_file']
119 <            if tmp != '':
120 <                tmpOutFiles = string.split(cfg_params['CMSSW.output_file'],',')
121 <                log.debug(7, 'cmssw::cmssw(): output files '+str(tmpOutFiles))
122 <                for tmp in tmpOutFiles:
123 <                    tmp=string.strip(tmp)
151 <                    self.output_file.append(tmp)
152 <                    pass
153 <            else:
154 <                log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
155 <                pass
156 <            pass
157 <        except KeyError:
158 <            log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
159 <            pass
116 >        outfileflag = False
117 >        self.output_file = []
118 >        tmp = cfg_params.get('CMSSW.output_file',None)
119 >        if tmp :
120 >            self.output_file = [x.strip() for x in tmp.split(',')]
121 >            outfileflag = True #output found
122 >        #else:
123 >        #    log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
124  
125          # script_exe file as additional file in inputSandbox
126 <        try:
127 <            self.scriptExe = cfg_params['USER.script_exe']
128 <            if self.scriptExe != '':
129 <               if not os.path.isfile(self.scriptExe):
130 <                  msg ="ERROR. file "+self.scriptExe+" not found"
131 <                  raise CrabException(msg)
168 <               self.additional_inbox_files.append(string.strip(self.scriptExe))
169 <        except KeyError:
170 <            self.scriptExe = ''
126 >        self.scriptExe = cfg_params.get('USER.script_exe',None)
127 >        if self.scriptExe :
128 >            if not os.path.isfile(self.scriptExe):
129 >                msg ="ERROR. file "+self.scriptExe+" not found"
130 >                raise CrabException(msg)
131 >            self.additional_inbox_files.append(string.strip(self.scriptExe))
132  
172        #CarlosDaniele
133          if self.datasetPath == None and self.pset == None and self.scriptExe == '' :
134 <           msg ="Error. script_exe  not defined"
135 <           raise CrabException(msg)
134 >            msg ="Error. script_exe  not defined"
135 >            raise CrabException(msg)
136 >
137 >        # use parent files...
138 >        self.useParent = int(self.cfg_params.get('CMSSW.use_parent',0))
139  
140          ## additional input files
141 <        try:
141 >        if cfg_params.has_key('USER.additional_input_files'):
142              tmpAddFiles = string.split(cfg_params['USER.additional_input_files'],',')
143              for tmp in tmpAddFiles:
144                  tmp = string.strip(tmp)
# Line 192 | Line 155 | class Cmssw(JobType):
155                      if not os.path.exists(file):
156                          raise CrabException("Additional input file not found: "+file)
157                      pass
195                    # fname = string.split(file, '/')[-1]
196                    # storedFile = common.work_space.pathForTgz()+'share/'+fname
197                    # shutil.copyfile(file, storedFile)
158                      self.additional_inbox_files.append(string.strip(file))
159                  pass
160              pass
161              common.logger.debug(5,"Additional input files: "+str(self.additional_inbox_files))
162 <        except KeyError:
203 <            pass
204 <
205 <        # files per job
206 <        try:
207 <            if (cfg_params['CMSSW.files_per_jobs']):
208 <                raise CrabException("files_per_jobs no longer supported.  Quitting.")
209 <        except KeyError:
210 <            pass
211 <
212 <        ## Events per job
213 <        try:
214 <            self.eventsPerJob =int( cfg_params['CMSSW.events_per_job'])
215 <            self.selectEventsPerJob = 1
216 <        except KeyError:
217 <            self.eventsPerJob = -1
218 <            self.selectEventsPerJob = 0
219 <    
220 <        ## number of jobs
221 <        try:
222 <            self.theNumberOfJobs =int( cfg_params['CMSSW.number_of_jobs'])
223 <            self.selectNumberOfJobs = 1
224 <        except KeyError:
225 <            self.theNumberOfJobs = 0
226 <            self.selectNumberOfJobs = 0
227 <
228 <        try:
229 <            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
230 <            self.selectTotalNumberEvents = 1
231 <        except KeyError:
232 <            self.total_number_of_events = 0
233 <            self.selectTotalNumberEvents = 0
234 <
235 <        if self.pset != None: #CarlosDaniele
236 <             if ( (self.selectTotalNumberEvents + self.selectEventsPerJob + self.selectNumberOfJobs) != 2 ):
237 <                 msg = 'Must define exactly two of total_number_of_events, events_per_job, or number_of_jobs.'
238 <                 raise CrabException(msg)
239 <        else:
240 <             if (self.selectNumberOfJobs == 0):
241 <                 msg = 'Must specify  number_of_jobs.'
242 <                 raise CrabException(msg)
243 <
244 <        ## source seed for pythia
245 <        try:
246 <            self.sourceSeed = int(cfg_params['CMSSW.pythia_seed'])
247 <        except KeyError:
248 <            self.sourceSeed = None
249 <            common.logger.debug(5,"No seed given")
250 <
251 <        try:
252 <            self.sourceSeedVtx = int(cfg_params['CMSSW.vtx_seed'])
253 <        except KeyError:
254 <            self.sourceSeedVtx = None
255 <            common.logger.debug(5,"No vertex seed given")
256 <
257 <        try:
258 <            self.sourceSeedG4 = int(cfg_params['CMSSW.g4_seed'])
259 <        except KeyError:
260 <            self.sourceSeedG4 = None
261 <            common.logger.debug(5,"No g4 sim hits seed given")
262 <
263 <        try:
264 <            self.sourceSeedMix = int(cfg_params['CMSSW.mix_seed'])
265 <        except KeyError:
266 <            self.sourceSeedMix = None
267 <            common.logger.debug(5,"No mix seed given")
162 >        pass
163  
269        try:
270            self.firstRun = int(cfg_params['CMSSW.first_run'])
271        except KeyError:
272            self.firstRun = None
273            common.logger.debug(5,"No first run given")
274        if self.pset != None: #CarlosDaniele
275            ver = string.split(self.version,"_")
276            if (int(ver[1])>=1 and int(ver[2])>=5):
277                import PsetManipulator150 as pp
278            else:
279                import PsetManipulator as pp
280            PsetEdit = pp.PsetManipulator(self.pset) #Daniele Pset
164  
165 +        ## New method of dealing with seeds
166 +        self.incrementSeeds = []
167 +        self.preserveSeeds = []
168 +        if cfg_params.has_key('CMSSW.preserve_seeds'):
169 +            tmpList = cfg_params['CMSSW.preserve_seeds'].split(',')
170 +            for tmp in tmpList:
171 +                tmp = tmp.strip()
172 +                self.preserveSeeds.append(tmp)
173 +        if cfg_params.has_key('CMSSW.increment_seeds'):
174 +            tmpList = cfg_params['CMSSW.increment_seeds'].split(',')
175 +            for tmp in tmpList:
176 +                tmp = tmp.strip()
177 +                self.incrementSeeds.append(tmp)
178 +
179 +        self.firstRun = cfg_params.get('CMSSW.first_run',None)
180 +
181 +        # Copy/return
182 +        self.copy_data = int(cfg_params.get('USER.copy_data',0))
183 +        self.return_data = int(cfg_params.get('USER.return_data',0))
184 +
185 +        self.conf = {}
186 +        self.conf['pubdata'] = None
187 +        # number of jobs requested to be created, limit obj splitting DD
188          #DBSDLS-start
189 <        ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
189 >        ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
190          self.maxEvents=0  # max events available   ( --> check the requested nb. of evts in Creator.py)
191          self.DBSPaths={}  # all dbs paths requested ( --> input to the site local discovery script)
192          self.jobDestination=[]  # Site destination(s) for each job (list of lists)
# Line 289 | Line 195 | class Cmssw(JobType):
195          blockSites = {}
196          if self.datasetPath:
197              blockSites = self.DataDiscoveryAndLocation(cfg_params)
198 <        #DBSDLS-end          
198 >        #DBSDLS-end
199 >        self.conf['blockSites']=blockSites
200  
294        self.tgzNameWithPath = self.getTarBall(self.executable)
295    
201          ## Select Splitting
202 <        if self.selectNoInput:
203 <            if self.pset == None: #CarlosDaniele
204 <                self.jobSplittingForScript()
202 >        splitByRun = int(cfg_params.get('CMSSW.split_by_run',0))
203 >
204 >        if self.selectNoInput:
205 >            if self.pset == None:
206 >                self.algo = 'ForScript'
207              else:
208 <                self.jobSplittingNoInput()
208 >                self.algo = 'NoInput'
209 >                self.conf['managedGenerators']=self.managedGenerators
210 >                self.conf['generator']=self.generator
211 >        elif splitByRun ==1:
212 >            self.algo = 'RunBased'
213          else:
214 <            self.jobSplittingByBlocks(blockSites)
214 >            self.algo = 'EventBased'
215 >
216 > #        self.algo = 'LumiBased'
217 >        splitter = JobSplitter(self.cfg_params,self.conf)
218 >        self.dict = splitter.Algos()[self.algo]()
219 >
220 >        self.argsFile= '%s/arguments.xml'%common.work_space.shareDir()
221 >        self.rootArgsFilename= 'arguments'
222 >        # modify Pset only the first time
223 >        if (isNew and self.pset != None): self.ModifyPset()
224 >
225 >        ## Prepare inputSandbox TarBall (only the first time)
226 >        self.tarNameWithPath = self.getTarBall(self.executable)
227 >
228 >
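
Job splitting is now delegated entirely to Splitter.JobSplitter: the constructor only chooses an algorithm name and then calls splitter.Algos()[self.algo](), i.e. Algos() acts as a name-to-method map returning the splitting dictionary ('njobs', 'params', 'args', 'jobDestination'). A minimal sketch of that dispatch shape with a hypothetical stand-in class:

    class TinySplitter:
        # hypothetical stand-in for Splitter.JobSplitter; same dispatch idea
        def __init__(self, cfg_params, conf):
            self.cfg_params, self.conf = cfg_params, conf
        def jobSplittingNoInput(self):
            return {'njobs': 2, 'params': ['MaxEvents', 'FirstRun'],
                    'args': [['100', '1'], ['100', '2']],
                    'jobDestination': [[''], ['']]}
        def Algos(self):
            return {'NoInput': self.jobSplittingNoInput}

    splitter = TinySplitter({}, {})
    job_dict = splitter.Algos()['NoInput']()      # same call shape as the code above
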
229 >    def ModifyPset(self):
230 >        import PsetManipulator as pp
231 >        PsetEdit = pp.PsetManipulator(self.pset)
232 >        try:
233 >            # Add FrameworkJobReport to parameter-set, set max events.
234 >            # Reset later for data jobs by writeCFG which does all modifications
235 >            PsetEdit.maxEvent(1)
236 >            PsetEdit.skipEvent(0)
237 >            PsetEdit.psetWriter(self.configFilename())
238 >            ## If present, add TFileService to output files
239 >            if not int(self.cfg_params.get('CMSSW.skip_TFileService_output',0)):
240 >                tfsOutput = PsetEdit.getTFileService()
241 >                if tfsOutput:
242 >                    if tfsOutput in self.output_file:
243 >                        common.logger.debug(5,"Output from TFileService "+tfsOutput+" already in output files")
244 >                    else:
245 >                        outfileflag = True #output found
246 >                        self.output_file.append(tfsOutput)
247 >                        common.logger.message("Adding "+tfsOutput+" (from TFileService) to list of output files")
248 >                    pass
249 >                pass
250 >            ## If present and requested, add PoolOutputModule to output files
251 >            if int(self.cfg_params.get('CMSSW.get_edm_output',0)):
252 >                edmOutput = PsetEdit.getPoolOutputModule()
253 >                if edmOutput:
254 >                    if edmOutput in self.output_file:
255 >                        common.logger.debug(5,"Output from PoolOutputModule "+edmOutput+" already in output files")
256 >                    else:
257 >                        self.output_file.append(edmOutput)
258 >                        common.logger.message("Adding "+edmOutput+" (from PoolOutputModule) to list of output files")
259 >                    pass
260 >                pass
261 >            # not required: check anyhow if present, to avoid accidental T2 overload
262 >            else:
263 >                edmOutput = PsetEdit.getPoolOutputModule()
264 >                if edmOutput and (edmOutput not in self.output_file):
265 >                    msg = "ERROR: a PoolOutputModule is present in your ParameterSet %s \n"%self.pset
266 >                    msg +="         but the file produced ( %s ) is not in the list of output files\n"%edmOutput
267 >                    msg += "WARNING: please remove it. If you want to keep it, add the file to output_files or use CMSSW.get_edm_output\n"
268 >                    raise CrabException(msg)
269 >                pass
270 >            pass
271 >        except CrabException, msg:
272 >            common.logger.message(str(msg))
273 >            msg='Error while manipulating ParameterSet (see previous message, if any): exiting...'
274 >            raise CrabException(msg)
275  
305        # modify Pset
306        if self.pset != None: #CarlosDaniele
307            try:
308                if (self.datasetPath): # standard job
309                    # allow to processa a fraction of events in a file
310                    PsetEdit.inputModule("INPUT")
311                    PsetEdit.maxEvent("INPUTMAXEVENTS")
312                    PsetEdit.skipEvent("INPUTSKIPEVENTS")
313                else:  # pythia like job
314                    PsetEdit.maxEvent(self.eventsPerJob)
315                    if (self.firstRun):
316                        PsetEdit.pythiaFirstRun("INPUTFIRSTRUN")  #First Run
317                    if (self.sourceSeed) :
318                        PsetEdit.pythiaSeed("INPUT")
319                        if (self.sourceSeedVtx) :
320                            PsetEdit.vtxSeed("INPUTVTX")
321                        if (self.sourceSeedG4) :
322                            PsetEdit.g4Seed("INPUTG4")
323                        if (self.sourceSeedMix) :
324                            PsetEdit.mixSeed("INPUTMIX")
325                # add FrameworkJobReport to parameter-set
326                PsetEdit.addCrabFJR(self.fjrFileName)
327                PsetEdit.psetWriter(self.configFilename())
328            except:
329                msg='Error while manipuliating ParameterSet: exiting...'
330                raise CrabException(msg)
276  
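
The output-file handling in ModifyPset() reduces to one decision: if the parameter set produces a file (TFileService or PoolOutputModule) that is not yet in self.output_file, either append it or abort. A compact sketch of that decision, with hypothetical names rather than the PsetManipulator API:

    def reconcile_output(declared, produced, auto_add):
        # declared: user's output_file list; produced: file named in the pset, or None
        if not produced or produced in declared:
            return declared
        if auto_add:                               # TFileService, or CMSSW.get_edm_output=1
            declared.append(produced)
            return declared
        raise ValueError("pset writes %s but it is not listed in output_file" % produced)
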
277      def DataDiscoveryAndLocation(self, cfg_params):
278  
279          import DataDiscovery
335        import DataDiscovery_DBS2
280          import DataLocation
281          common.logger.debug(10,"CMSSW::DataDiscoveryAndLocation()")
282  
# Line 341 | Line 285 | class Cmssw(JobType):
285          ## Contact the DBS
286          common.logger.message("Contacting Data Discovery Services ...")
287          try:
288 <
345 <            if self.use_dbs_1 == 1 :
346 <                self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params)
347 <            else :
348 <                self.pubdata=DataDiscovery_DBS2.DataDiscovery_DBS2(datasetPath, cfg_params)
288 >            self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params,self.skip_blocks)
289              self.pubdata.fetchDBSInfo()
290  
291          except DataDiscovery.NotExistingDatasetError, ex :
# Line 357 | Line 297 | class Cmssw(JobType):
297          except DataDiscovery.DataDiscoveryError, ex:
298              msg = 'ERROR ***: failed Data Discovery in DBS :  %s'%ex.getErrorMessage()
299              raise CrabException(msg)
360        except DataDiscovery_DBS2.NotExistingDatasetError_DBS2, ex :
361            msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
362            raise CrabException(msg)
363        except DataDiscovery_DBS2.NoDataTierinProvenanceError_DBS2, ex :
364            msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
365            raise CrabException(msg)
366        except DataDiscovery_DBS2.DataDiscoveryError_DBS2, ex:
367            msg = 'ERROR ***: failed Data Discovery in DBS :  %s'%ex.getErrorMessage()
368            raise CrabException(msg)
300  
301          self.filesbyblock=self.pubdata.getFiles()
302 <        self.eventsbyblock=self.pubdata.getEventsPerBlock()
303 <        self.eventsbyfile=self.pubdata.getEventsPerFile()
302 >        #print self.filesbyblock
303 >        self.conf['pubdata']=self.pubdata
304  
305          ## get max number of events
306 <        self.maxEvents=self.pubdata.getMaxEvents() ##  self.maxEvents used in Creator.py
306 >        self.maxEvents=self.pubdata.getMaxEvents()
307  
308          ## Contact the DLS and build a list of sites hosting the fileblocks
309          try:
310              dataloc=DataLocation.DataLocation(self.filesbyblock.keys(),cfg_params)
311              dataloc.fetchDLSInfo()
312 +
313          except DataLocation.DataLocationError , ex:
314              msg = 'ERROR ***: failed Data Location in DLS \n %s '%ex.getErrorMessage()
315              raise CrabException(msg)
384        
316  
317 <        sites = dataloc.getSites()
317 >
318 >        unsorted_sites = dataloc.getSites()
319 >        #print "Unsorted :",unsorted_sites
320 >        sites = self.filesbyblock.fromkeys(self.filesbyblock,'')
321 >        for lfn in self.filesbyblock.keys():
322 >            #print lfn
323 >            if unsorted_sites.has_key(lfn):
324 >                #print "Found ",lfn
325 >                sites[lfn]=unsorted_sites[lfn]
326 >            else:
327 >                #print "Not Found ",lfn
328 >                sites[lfn]=[]
329 >        #print sites
330 >
331 >        #print "Sorted :",sites
332 >        if len(sites)==0:
333 >            msg = 'ERROR ***: no location for any of the blocks of this dataset: \n\t %s \n'%datasetPath
334 >            msg += "\tMaybe the dataset is located only at T1's (or at T0), where analysis jobs are not allowed\n"
335 >            msg += "\tPlease check DataDiscovery page https://cmsweb.cern.ch/dbs_discovery/\n"
336 >            raise CrabException(msg)
337 >
338          allSites = []
339          listSites = sites.values()
340          for listSite in listSites:
341              for oneSite in listSite:
342                  allSites.append(oneSite)
343 <        allSites = self.uniquelist(allSites)
343 >        allSites = [site for n, site in enumerate(allSites) if site not in allSites[:n]]
344 >
345  
346          # screen output
347          common.logger.message("Requested dataset: " + datasetPath + " has " + str(self.maxEvents) + " events in " + str(len(self.filesbyblock.keys())) + " blocks.\n")
348  
349          return sites
398    
399    def jobSplittingByBlocks(self, blockSites):
400        """
401        Perform job splitting. Jobs run over an integer number of files
402        and no more than one block.
403        ARGUMENT: blockSites: dictionary with blocks as keys and list of host sites as values
404        REQUIRES: self.selectTotalNumberEvents, self.selectEventsPerJob, self.selectNumberofJobs,
405                  self.total_number_of_events, self.eventsPerJob, self.theNumberOfJobs,
406                  self.maxEvents, self.filesbyblock
407        SETS: self.jobDestination - Site destination(s) for each job (a list of lists)
408              self.total_number_of_jobs - Total # of jobs
409              self.list_of_args - File(s) job will run on (a list of lists)
410        """
411
412        # ---- Handle the possible job splitting configurations ---- #
413        if (self.selectTotalNumberEvents):
414            totalEventsRequested = self.total_number_of_events
415        if (self.selectEventsPerJob):
416            eventsPerJobRequested = self.eventsPerJob
417            if (self.selectNumberOfJobs):
418                totalEventsRequested = self.theNumberOfJobs * self.eventsPerJob
419
420        # If user requested all the events in the dataset
421        if (totalEventsRequested == -1):
422            eventsRemaining=self.maxEvents
423        # If user requested more events than are in the dataset
424        elif (totalEventsRequested > self.maxEvents):
425            eventsRemaining = self.maxEvents
426            common.logger.message("Requested "+str(self.total_number_of_events)+ " events, but only "+str(self.maxEvents)+" events are available.")
427        # If user requested less events than are in the dataset
428        else:
429            eventsRemaining = totalEventsRequested
350  
431        # If user requested more events per job than are in the dataset
432        if (self.selectEventsPerJob and eventsPerJobRequested > self.maxEvents):
433            eventsPerJobRequested = self.maxEvents
434
435        # For user info at end
436        totalEventCount = 0
437
438        if (self.selectTotalNumberEvents and self.selectNumberOfJobs):
439            eventsPerJobRequested = int(eventsRemaining/self.theNumberOfJobs)
440
441        if (self.selectNumberOfJobs):
442            common.logger.message("May not create the exact number_of_jobs requested.")
443
444        if ( self.ncjobs == 'all' ) :
445            totalNumberOfJobs = 999999999
446        else :
447            totalNumberOfJobs = self.ncjobs
448            
449
450        blocks = blockSites.keys()
451        blockCount = 0
452        # Backup variable in case self.maxEvents counted events in a non-included block
453        numBlocksInDataset = len(blocks)
454
455        jobCount = 0
456        list_of_lists = []
457
458        # list tracking which jobs are in which jobs belong to which block
459        jobsOfBlock = {}
460
461        # ---- Iterate over the blocks in the dataset until ---- #
462        # ---- we've met the requested total # of events    ---- #
463        while ( (eventsRemaining > 0) and (blockCount < numBlocksInDataset) and (jobCount < totalNumberOfJobs)):
464            block = blocks[blockCount]
465            blockCount += 1
466            if block not in jobsOfBlock.keys() :
467                jobsOfBlock[block] = []
468            
469            if self.eventsbyblock.has_key(block) :
470                numEventsInBlock = self.eventsbyblock[block]
471                common.logger.debug(5,'Events in Block File '+str(numEventsInBlock))
472            
473                files = self.filesbyblock[block]
474                numFilesInBlock = len(files)
475                if (numFilesInBlock <= 0):
476                    continue
477                fileCount = 0
351  
352 <                # ---- New block => New job ---- #
480 <                parString = "\\{"
481 <                # counter for number of events in files currently worked on
482 <                filesEventCount = 0
483 <                # flag if next while loop should touch new file
484 <                newFile = 1
485 <                # job event counter
486 <                jobSkipEventCount = 0
487 <            
488 <                # ---- Iterate over the files in the block until we've met the requested ---- #
489 <                # ---- total # of events or we've gone over all the files in this block  ---- #
490 <                while ( (eventsRemaining > 0) and (fileCount < numFilesInBlock) and (jobCount < totalNumberOfJobs) ):
491 <                    file = files[fileCount]
492 <                    if newFile :
493 <                        try:
494 <                            numEventsInFile = self.eventsbyfile[file]
495 <                            common.logger.debug(6, "File "+str(file)+" has "+str(numEventsInFile)+" events")
496 <                            # increase filesEventCount
497 <                            filesEventCount += numEventsInFile
498 <                            # Add file to current job
499 <                            parString += '\\\"' + file + '\\\"\,'
500 <                            newFile = 0
501 <                        except KeyError:
502 <                            common.logger.message("File "+str(file)+" has unknown number of events: skipping")
503 <                        
504 <
505 <                    # if less events in file remain than eventsPerJobRequested
506 <                    if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested ) :
507 <                        # if last file in block
508 <                        if ( fileCount == numFilesInBlock-1 ) :
509 <                            # end job using last file, use remaining events in block
510 <                            # close job and touch new file
511 <                            fullString = parString[:-2]
512 <                            fullString += '\\}'
513 <                            list_of_lists.append([fullString,str(-1),str(jobSkipEventCount)])
514 <                            common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(filesEventCount - jobSkipEventCount)+" events (last file in block).")
515 <                            self.jobDestination.append(blockSites[block])
516 <                            common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
517 <                            # fill jobs of block dictionary
518 <                            jobsOfBlock[block].append(jobCount+1)
519 <                            # reset counter
520 <                            jobCount = jobCount + 1
521 <                            totalEventCount = totalEventCount + filesEventCount - jobSkipEventCount
522 <                            eventsRemaining = eventsRemaining - filesEventCount + jobSkipEventCount
523 <                            jobSkipEventCount = 0
524 <                            # reset file
525 <                            parString = "\\{"
526 <                            filesEventCount = 0
527 <                            newFile = 1
528 <                            fileCount += 1
529 <                        else :
530 <                            # go to next file
531 <                            newFile = 1
532 <                            fileCount += 1
533 <                    # if events in file equal to eventsPerJobRequested
534 <                    elif ( filesEventCount - jobSkipEventCount == eventsPerJobRequested ) :
535 <                        # close job and touch new file
536 <                        fullString = parString[:-2]
537 <                        fullString += '\\}'
538 <                        list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
539 <                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
540 <                        self.jobDestination.append(blockSites[block])
541 <                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
542 <                        jobsOfBlock[block].append(jobCount+1)
543 <                        # reset counter
544 <                        jobCount = jobCount + 1
545 <                        totalEventCount = totalEventCount + eventsPerJobRequested
546 <                        eventsRemaining = eventsRemaining - eventsPerJobRequested
547 <                        jobSkipEventCount = 0
548 <                        # reset file
549 <                        parString = "\\{"
550 <                        filesEventCount = 0
551 <                        newFile = 1
552 <                        fileCount += 1
553 <                        
554 <                    # if more events in file remain than eventsPerJobRequested
555 <                    else :
556 <                        # close job but don't touch new file
557 <                        fullString = parString[:-2]
558 <                        fullString += '\\}'
559 <                        list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
560 <                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
561 <                        self.jobDestination.append(blockSites[block])
562 <                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
563 <                        jobsOfBlock[block].append(jobCount+1)
564 <                        # increase counter
565 <                        jobCount = jobCount + 1
566 <                        totalEventCount = totalEventCount + eventsPerJobRequested
567 <                        eventsRemaining = eventsRemaining - eventsPerJobRequested
568 <                        # calculate skip events for last file
569 <                        # use filesEventCount (contains several files), jobSkipEventCount and eventsPerJobRequest
570 <                        jobSkipEventCount = eventsPerJobRequested - (filesEventCount - jobSkipEventCount - self.eventsbyfile[file])
571 <                        # remove all but the last file
572 <                        filesEventCount = self.eventsbyfile[file]
573 <                        parString = "\\{"
574 <                        parString += '\\\"' + file + '\\\"\,'
575 <                    pass # END if
576 <                pass # END while (iterate over files in the block)
577 <        pass # END while (iterate over blocks in the dataset)
578 <        self.ncjobs = self.total_number_of_jobs = jobCount
579 <        if (eventsRemaining > 0 and jobCount < totalNumberOfJobs ):
580 <            common.logger.message("Could not run on all requested events because some blocks not hosted at allowed sites.")
581 <        common.logger.message(str(jobCount)+" job(s) can run on "+str(totalEventCount)+" events.\n")
582 <        
583 <        # screen output
584 <        screenOutput = "List of jobs and available destination sites:\n\n"
352 >    def split(self, jobParams,firstJobID):
353  
354 <        # keep trace of block with no sites to print a warning at the end
355 <        noSiteBlock = []
356 <        bloskNoSite = []
589 <
590 <        blockCounter = 0
591 <        for block in blocks:
592 <            if block in jobsOfBlock.keys() :
593 <                blockCounter += 1
594 <                screenOutput += "Block %5i: jobs %20s: sites: %s\n" % (blockCounter,spanRanges(jobsOfBlock[block]),','.join(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)))
595 <                if len(self.blackWhiteListParser.checkWhiteList(self.blackWhiteListParser.checkBlackList(blockSites[block],block),block)) == 0:
596 <                    noSiteBlock.append( spanRanges(jobsOfBlock[block]) )
597 <                    bloskNoSite.append( blockCounter )
598 <        
599 <        common.logger.message(screenOutput)
600 <        if len(noSiteBlock) > 0 and len(bloskNoSite) > 0:
601 <            msg = 'WARNING: No sites are hosting any part of data for block:\n                '
602 <            virgola = ""
603 <            if len(bloskNoSite) > 1:
604 <                virgola = ","
605 <            for block in bloskNoSite:
606 <                msg += ' ' + str(block) + virgola
607 <            msg += '\n               Related jobs:\n                 '
608 <            virgola = ""
609 <            if len(noSiteBlock) > 1:
610 <                virgola = ","
611 <            for range_jobs in noSiteBlock:
612 <                msg += str(range_jobs) + virgola
613 <            msg += '\n               will not be submitted and this block of data can not be analyzed!\n'
614 <            common.logger.message(msg)
354 >        jobParams = self.dict['args']
355 >        njobs = self.dict['njobs']
356 >        self.jobDestination = self.dict['jobDestination']
357  
358 <        self.list_of_args = list_of_lists
359 <        return
358 >        if njobs==0:
359 >            raise CrabException("Asked to split "+str(njobs)+" jobs: aborting")
360  
361 <    def jobSplittingNoInput(self):
362 <        """
363 <        Perform job splitting based on number of event per job
622 <        """
623 <        common.logger.debug(5,'Splitting per events')
624 <        common.logger.message('Required '+str(self.eventsPerJob)+' events per job ')
625 <        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
626 <        common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
627 <
628 <        if (self.total_number_of_events < 0):
629 <            msg='Cannot split jobs per Events with "-1" as total number of events'
630 <            raise CrabException(msg)
631 <
632 <        if (self.selectEventsPerJob):
633 <            if (self.selectTotalNumberEvents):
634 <                self.total_number_of_jobs = int(self.total_number_of_events/self.eventsPerJob)
635 <            elif(self.selectNumberOfJobs) :  
636 <                self.total_number_of_jobs =self.theNumberOfJobs
637 <                self.total_number_of_events =int(self.theNumberOfJobs*self.eventsPerJob)
638 <
639 <        elif (self.selectNumberOfJobs) :
640 <            self.total_number_of_jobs = self.theNumberOfJobs
641 <            self.eventsPerJob = int(self.total_number_of_events/self.total_number_of_jobs)
642 <
643 <        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
644 <
645 <        # is there any remainder?
646 <        check = int(self.total_number_of_events) - (int(self.total_number_of_jobs)*self.eventsPerJob)
647 <
648 <        common.logger.debug(5,'Check  '+str(check))
649 <
650 <        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created, each for '+str(self.eventsPerJob)+' for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
651 <        if check > 0:
652 <            common.logger.message('Warning: asked '+str(self.total_number_of_events)+' but can do only '+str(int(self.total_number_of_jobs)*self.eventsPerJob))
653 <
654 <        # argument is seed number.$i
655 <        self.list_of_args = []
656 <        for i in range(self.total_number_of_jobs):
657 <            ## Since there is no input, any site is good
658 <           # self.jobDestination.append(["Any"])
659 <            self.jobDestination.append([""]) #must be empty to write correctly the xml
660 <            args=[]
661 <            if (self.firstRun):
662 <                    ## pythia first run
663 <                #self.list_of_args.append([(str(self.firstRun)+str(i))])
664 <                args.append(str(self.firstRun)+str(i))
665 <            else:
666 <                ## no first run
667 <                #self.list_of_args.append([str(i)])
668 <                args.append(str(i))
669 <            if (self.sourceSeed):
670 <                args.append(str(self.sourceSeed)+str(i))
671 <                if (self.sourceSeedVtx):
672 <                    ## + vtx random seed
673 <                    args.append(str(self.sourceSeedVtx)+str(i))
674 <                if (self.sourceSeedG4):
675 <                    ## + G4 random seed
676 <                    args.append(str(self.sourceSeedG4)+str(i))
677 <                if (self.sourceSeedMix):    
678 <                    ## + Mix random seed
679 <                    args.append(str(self.sourceSeedMix)+str(i))
680 <                pass
681 <            pass
682 <            self.list_of_args.append(args)
683 <        pass
684 <            
685 <        # print self.list_of_args
361 >        # create the empty structure
362 >        for i in range(njobs):
363 >            jobParams.append("")
364  
365 +        listID=[]
366 +        listField=[]
367 +        listDictions=[]
368 +        exist= os.path.exists(self.argsFile)
369 +        for id in range(njobs):
370 +            job = id + int(firstJobID)
371 +            listID.append(job+1)
372 +            job_ToSave ={}
373 +            concString = ' '
374 +            argu=''
375 +            str_argu = str(job+1)
376 +            if len(jobParams[id]):
377 +                argu = {'JobID': job+1}
378 +                for i in range(len(jobParams[id])):
379 +                    argu[self.dict['params'][i]]=jobParams[id][i]
380 +                # just for debug
381 +                str_argu += concString.join(jobParams[id])
382 +            listDictions.append(argu)
383 +            job_ToSave['arguments']= str(job+1)
384 +            job_ToSave['dlsDestination']= self.jobDestination[id]
385 +            listField.append(job_ToSave)
386 +            msg="Job  %s  Arguments:  %s\n"%(str(job+1),str_argu)
387 +            msg+="\t  Destination: %s "%(str(self.jobDestination[id]))
388 +            common.logger.debug(5,msg)
389 +        # write xml
390 +        if len(listDictions):
391 +            if exist==False: self.CreateXML()
392 +            self.addEntry(listDictions)
393 +            self.addXMLfile()
394 +        common._db.updateJob_(listID,listField)
395 +        self.zipTarFile()
396          return
397 +      
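
Inside split(), each job's argument list is zipped with the parameter names provided by the splitter (self.dict['params']) to build one dictionary per job, which is what later goes into the arguments XML. A small sketch of that pairing with hypothetical values:

    params = ['MaxEvents', 'SkipEvents']           # hypothetical self.dict['params']
    job_args = [['100', '0'], ['100', '100']]      # hypothetical self.dict['args']
    for jid, args in enumerate(job_args):
        argu = {'JobID': jid + 1}
        argu.update(dict(zip(params, args)))
        # e.g. {'JobID': 1, 'MaxEvents': '100', 'SkipEvents': '0'}
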
398 +    def addXMLfile(self):
399  
400 +        import tarfile
401 +       # try:
402 +        common.logger.debug(5, "Adding "+self.argsFile+" to "+self.tarNameWithPath)
403 +        tar = tarfile.open(self.tarNameWithPath, "a")
404 +        tar.add(self.argsFile, os.path.basename(self.argsFile))
405 +        tar.close()
406 +       ## except:
407 +       #     pass
408  
409 <    def jobSplittingForScript(self):#CarlosDaniele
409 >  
410 >    def CreateXML(self):
411          """
692        Perform job splitting based on number of job
412          """
413 <        common.logger.debug(5,'Splitting per job')
414 <        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
696 <
697 <        self.total_number_of_jobs = self.theNumberOfJobs
698 <
699 <        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
700 <
701 <        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created')
702 <
703 <        # argument is seed number.$i
704 <        self.list_of_args = []
705 <        for i in range(self.total_number_of_jobs):
706 <            ## Since there is no input, any site is good
707 <           # self.jobDestination.append(["Any"])
708 <            self.jobDestination.append([""])
709 <            ## no random seed
710 <            self.list_of_args.append([str(i)])
413 >        result = IMProvNode( self.rootArgsFilename )
414 >        outfile = file( self.argsFile, 'w').write(str(result))
415          return
416  
417 <    def split(self, jobParams):
418 <
419 <        common.jobDB.load()
716 <        #### Fabio
717 <        njobs = self.total_number_of_jobs
718 <        arglist = self.list_of_args
719 <        # create the empty structure
720 <        for i in range(njobs):
721 <            jobParams.append("")
722 <        
723 <        for job in range(njobs):
724 <            jobParams[job] = arglist[job]
725 <            # print str(arglist[job])
726 <            # print jobParams[job]
727 <            common.jobDB.setArguments(job, jobParams[job])
728 <            common.logger.debug(5,"Job "+str(job)+" Destination: "+str(self.jobDestination[job]))
729 <            common.jobDB.setDestination(job, self.jobDestination[job])
417 >    def addEntry(self, listDictions):
418 >        """
419 >        _addEntry_
420  
421 <        common.jobDB.save()
421 >        add an entry to the xml file
422 >        """
423 >        from IMProv.IMProvLoader import loadIMProvFile
424 >        ## load xml
425 >        improvDoc = loadIMProvFile(self.argsFile)
426 >        entrname= 'Job'
427 >        for dictions in listDictions:
428 >           report = IMProvNode(entrname , None, **dictions)
429 >           improvDoc.addNode(report)
430 >        outfile = file( self.argsFile, 'w').write(str(improvDoc))
431          return
432 <    
734 <    def getJobTypeArguments(self, nj, sched):
735 <        result = ''
736 <        for i in common.jobDB.arguments(nj):
737 <            result=result+str(i)+" "
738 <        return result
739 <  
432 >
433      def numberOfJobs(self):
434 <        # Fabio
742 <        return self.total_number_of_jobs
434 >        return self.dict['njobs']
435  
436      def getTarBall(self, exe):
437          """
438          Return the TarBall with lib and exe
439          """
440 <        
441 <        # if it exist, just return it
442 <        #
751 <        # Marco. Let's start to use relative path for Boss XML files
752 <        #
753 <        self.tgzNameWithPath = common.work_space.pathForTgz()+'share/'+self.tgz_name
754 <        if os.path.exists(self.tgzNameWithPath):
755 <            return self.tgzNameWithPath
440 >        self.tarNameWithPath = common.work_space.pathForTgz()+self.tar_name
441 >        if os.path.exists(self.tarNameWithPath):
442 >            return self.tarNameWithPath
443  
444          # Prepare a tar gzipped file with user binaries.
445          self.buildTar_(exe)
446  
447 <        return string.strip(self.tgzNameWithPath)
447 >        return string.strip(self.tarNameWithPath)
448  
449      def buildTar_(self, executable):
450  
451          # First of all declare the user Scram area
452          swArea = self.scram.getSWArea_()
766        #print "swArea = ", swArea
767        # swVersion = self.scram.getSWVersion()
768        # print "swVersion = ", swVersion
453          swReleaseTop = self.scram.getReleaseTop_()
454 <        #print "swReleaseTop = ", swReleaseTop
771 <        
454 >
455          ## check if working area is release top
456          if swReleaseTop == '' or swArea == swReleaseTop:
457 +            common.logger.debug(3,"swArea = "+swArea+" swReleaseTop ="+swReleaseTop)
458              return
459  
460          import tarfile
461          try: # create tar ball
462 <            tar = tarfile.open(self.tgzNameWithPath, "w:gz")
462 >            #tar = tarfile.open(self.tgzNameWithPath, "w:gz")
463 >            tar = tarfile.open(self.tarNameWithPath, "w")
464              ## First find the executable
465              if (self.executable != ''):
466                  exeWithPath = self.scram.findFile_(executable)
467                  if ( not exeWithPath ):
468                      raise CrabException('User executable '+executable+' not found')
469 <    
469 >
470                  ## then check if it's private or not
471                  if exeWithPath.find(swReleaseTop) == -1:
472                      # the exe is private, so we must ship
# Line 790 | Line 475 | class Cmssw(JobType):
475                      # distinguish case when script is in user project area or given by full path somewhere else
476                      if exeWithPath.find(path) >= 0 :
477                          exe = string.replace(exeWithPath, path,'')
478 <                        tar.add(path+exe,os.path.basename(executable))
478 >                        tar.add(path+exe,exe)
479                      else :
480                          tar.add(exeWithPath,os.path.basename(executable))
481                      pass
482                  else:
483                      # the exe is from release, we'll find it on WN
484                      pass
485 <    
485 >
486              ## Now get the libraries: only those in local working area
487 +            tar.dereference=True
488              libDir = 'lib'
489              lib = swArea+'/' +libDir
490              common.logger.debug(5,"lib "+lib+" to be tarred")
491              if os.path.exists(lib):
492                  tar.add(lib,libDir)
493 <    
493 >
494              ## Now check if module dir is present
495              moduleDir = 'module'
496              module = swArea + '/' + moduleDir
497              if os.path.isdir(module):
498                  tar.add(module,moduleDir)
499 +            tar.dereference=False
500  
501              ## Now check if any data dir(s) is present
502 <            swAreaLen=len(swArea)
503 <            for root, dirs, files in os.walk(swArea):
504 <                if "data" in dirs:
505 <                    common.logger.debug(5,"data "+root+"/data"+" to be tarred")
506 <                    tar.add(root+"/data",root[swAreaLen:]+"/data")
507 <
508 <            ## Add ProdAgent dir to tar
509 <            paDir = 'ProdAgentApi'
510 <            pa = os.environ['CRABDIR'] + '/' + 'ProdAgentApi'
511 <            if os.path.isdir(pa):
512 <                tar.add(pa,paDir)
513 <
514 <            ### FEDE FOR DBS PUBLICATION
515 <            ## Add PRODCOMMON dir to tar
516 <            prodcommonDir = 'ProdCommon'
517 <            prodcommonPath = os.environ['CRABDIR'] + '/' + 'ProdCommon'
518 <            if os.path.isdir(prodcommonPath):
519 <                tar.add(prodcommonPath,prodcommonDir)
520 <            #############################    
521 <        
522 <            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
502 >            self.dataExist = False
503 >            todo_list = [(i, i) for i in  os.listdir(swArea+"/src")]
504 >            while len(todo_list):
505 >                entry, name = todo_list.pop()
506 >                if name.startswith('crab_0_') or  name.startswith('.') or name == 'CVS':
507 >                    continue
508 >                if os.path.isdir(swArea+"/src/"+entry):
509 >                    entryPath = entry + '/'
510 >                    todo_list += [(entryPath + i, i) for i in  os.listdir(swArea+"/src/"+entry)]
511 >                    if name == 'data':
512 >                        self.dataExist=True
513 >                        common.logger.debug(5,"data "+entry+" to be tarred")
514 >                        tar.add(swArea+"/src/"+entry,"src/"+entry)
515 >                    pass
516 >                pass
517 >
518 >            ### CMSSW ParameterSet
519 >            if not self.pset is None:
520 >                cfg_file = common.work_space.jobDir()+self.configFilename()
521 >                tar.add(cfg_file,self.configFilename())
522 >
523 >
524 >            ## Add ProdCommon dir to tar
525 >            prodcommonDir = './'
526 >            prodcommonPath = os.environ['CRABDIR'] + '/' + 'external/'
527 >            neededStuff = ['ProdCommon/__init__.py','ProdCommon/FwkJobRep', 'ProdCommon/CMSConfigTools', \
528 >                           'ProdCommon/Core', 'ProdCommon/MCPayloads', 'IMProv', 'ProdCommon/Storage', \
529 >                           'WMCore/__init__.py','WMCore/Algorithms']
530 >            for file in neededStuff:
531 >                tar.add(prodcommonPath+file,prodcommonDir+file)
532 >
533 >            ##### ML stuff
534 >            ML_file_list=['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py']
535 >            path=os.environ['CRABDIR'] + '/python/'
536 >            for file in ML_file_list:
537 >                tar.add(path+file,file)
538 >
539 >            ##### Utils
540 >            Utils_file_list=['parseCrabFjr.py','writeCfg.py', 'fillCrabFjr.py','cmscp.py']
541 >            for file in Utils_file_list:
542 >                tar.add(path+file,file)
543 >
544 >            ##### AdditionalFiles
545 >            tar.dereference=True
546 >            for file in self.additional_inbox_files:
547 >                tar.add(file,string.split(file,'/')[-1])
548 >            tar.dereference=False
549 >            common.logger.debug(5,"Files in "+self.tarNameWithPath+" : "+str(tar.getnames()))
550 >
551              tar.close()
552 <        except :
553 <            raise CrabException('Could not create tar-ball')
552 >        except IOError, exc:
553 >            common.logger.write(str(exc))
554 >            raise CrabException('Could not create tar-ball '+self.tarNameWithPath)
555 >        except tarfile.TarError, exc:
556 >            common.logger.write(str(exc))
557 >            raise CrabException('Could not create tar-ball '+self.tarNameWithPath)
558 >  
559 >    def zipTarFile(self):  
560 >
561 >        cmd = "gzip -c %s > %s "%(self.tarNameWithPath,self.tgzNameWithPath)
562 >        res=runCommand(cmd)
563  
840        ## check for tarball size
564          tarballinfo = os.stat(self.tgzNameWithPath)
565          if ( tarballinfo.st_size > self.MaxTarBallSize*1024*1024 ) :
566 <            raise CrabException('Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) + ' MB input sandbox limit and not supported by the used GRID submission system. Please make sure that no unnecessary files are in all data directories in your local CMSSW project area as they are automatically packed into the input sandbox.')
566 >            msg  = 'Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) \
567 >               +' MB input sandbox limit \n'
568 >            msg += '      and not supported by the direct GRID submission system.\n'
569 >            msg += '      Please use the CRAB server mode by setting server_name=<NAME> in section [CRAB] of your crab.cfg.\n'
570 >            msg += '      For further info please see https://twiki.cern.ch/twiki/bin/view/CMS/CrabServer#CRABSERVER_for_Users'
571 >            raise CrabException(msg)
572  
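For reference, a minimal standalone sketch of the sandbox-size check above; the limit value and tarball path below are hypothetical, and st_size (bytes) is converted to MB before comparing against the configured limit.

    # Illustrative sketch only (hypothetical values), mirroring the check above:
    # os.stat().st_size is in bytes, the limit is expressed in MB.
    import os

    MaxTarBallSize = 9.5                          # MB, hypothetical limit
    tgzNameWithPath = '/tmp/default.tar.gz'       # hypothetical tarball path
    size_mb = float(os.stat(tgzNameWithPath).st_size) / 1024.0 / 1024.0
    if size_mb > MaxTarBallSize:
        print('Input sandbox of %.1f MB exceeds the %s MB limit' % (size_mb, MaxTarBallSize))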
573          ## create tar-ball with ML stuff
846        self.MLtgzfile =  common.work_space.pathForTgz()+'share/MLfiles.tgz'
847        try:
848            tar = tarfile.open(self.MLtgzfile, "w:gz")
849            path=os.environ['CRABDIR'] + '/python/'
850            for file in ['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py', 'parseCrabFjr.py']:
851                tar.add(path+file,file)
852            common.logger.debug(5,"Files added to "+self.MLtgzfile+" : "+str(tar.getnames()))
853            tar.close()
854        except :
855            raise CrabException('Could not create ML files tar-ball')
856        
857        return
858        
859    def additionalInputFileTgz(self):
860        """
861        Put all additional files into a tar ball and return its name
862        """
863        import tarfile
864        tarName=  common.work_space.pathForTgz()+'share/'+self.additional_tgz_name
865        tar = tarfile.open(tarName, "w:gz")
866        for file in self.additional_inbox_files:
867            tar.add(file,string.split(file,'/')[-1])
868        common.logger.debug(5,"Files added to "+self.additional_tgz_name+" : "+str(tar.getnames()))
869        tar.close()
870        return tarName
574  
575 <    def wsSetupEnvironment(self, nj):
575 >    def wsSetupEnvironment(self, nj=0):
576          """
577          Returns part of a job script which prepares
578          the execution environment for the job 'nj'.
579          """
580 +        # FUTURE: Drop support for .cfg when possible
581 +        if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
582 +            psetName = 'pset.py'
583 +        else:
584 +            psetName = 'pset.cfg'
585          # Prepare JobType-independent part
586 <        txt = ''
587 <  
588 <        ## OLI_Daniele at this level  middleware already known
881 <
882 <        txt += 'if [ $middleware == LCG ]; then \n'
883 <        txt += '    echo "### First set SCRAM ARCH and BUILD_ARCH to ' + self.executable_arch + ' ###"\n'
884 <        txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
885 <        txt += '    export BUILD_ARCH='+self.executable_arch+'\n'
586 >        txt = '\n#Written by cms_cmssw::wsSetupEnvironment\n'
587 >        txt += 'echo ">>> setup environment"\n'
588 >        txt += 'if [ $middleware == LCG ] || [ $middleware == CAF ] || [ $middleware == LSF ]; then \n'
589          txt += self.wsSetupCMSLCGEnvironment_()
590          txt += 'elif [ $middleware == OSG ]; then\n'
591          txt += '    WORKING_DIR=`/bin/mktemp  -d $OSG_WN_TMP/cms_XXXXXXXXXXXX`\n'
592 <        txt += '    echo "Created working directory: $WORKING_DIR"\n'
593 <        txt += '    if [ ! -d $WORKING_DIR ] ;then\n'
594 <        txt += '        echo "SET_CMS_ENV 10016 ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
595 <        txt += '    echo "JOB_EXIT_STATUS = 10016"\n'
893 <        txt += '    echo "JobExitCode=10016" | tee -a $RUNTIME_AREA/$repo\n'
894 <        txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
895 <        txt += '        rm -f $RUNTIME_AREA/$repo \n'
896 <        txt += '        echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
897 <        txt += '        echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
898 <        txt += '        exit 1\n'
592 >        txt += '    if [ ! $? == 0 ] ;then\n'
593 >        txt += '        echo "ERROR ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
594 >        txt += '        job_exit_code=10016\n'
595 >        txt += '        func_exit\n'
596          txt += '    fi\n'
597 +        txt += '    echo ">>> Created working directory: $WORKING_DIR"\n'
598          txt += '\n'
599          txt += '    echo "Change to working directory: $WORKING_DIR"\n'
600          txt += '    cd $WORKING_DIR\n'
601 <        txt += self.wsSetupCMSOSGEnvironment_()
602 <        txt += '    echo "### Set SCRAM ARCH to ' + self.executable_arch + ' ###"\n'
603 <        txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
601 >        txt += '    echo ">>> current directory (WORKING_DIR): $WORKING_DIR"\n'
602 >        txt += self.wsSetupCMSOSGEnvironment_()
603 >        #Setup SGE Environment
604 >        txt += 'elif [ $middleware == SGE ]; then\n'
605 >        txt += self.wsSetupCMSLCGEnvironment_()
606 >
607 >        txt += 'elif [ $middleware == ARC ]; then\n'
608 >        txt += self.wsSetupCMSLCGEnvironment_()
609 >
610          txt += 'fi\n'
611  
612          # Prepare JobType-specific part
613          scram = self.scram.commandName()
614          txt += '\n\n'
615 <        txt += 'echo "### SPECIFIC JOB SETUP ENVIRONMENT ###"\n'
615 >        txt += 'echo ">>> specific cmssw setup environment:"\n'
616 >        txt += 'echo "CMSSW_VERSION =  '+self.version+'"\n'
617          txt += scram+' project CMSSW '+self.version+'\n'
618          txt += 'status=$?\n'
619          txt += 'if [ $status != 0 ] ; then\n'
620 <        txt += '   echo "SET_EXE_ENV 10034 ==>ERROR CMSSW '+self.version+' not found on `hostname`" \n'
621 <        txt += '   echo "JOB_EXIT_STATUS = 10034"\n'
622 <        txt += '   echo "JobExitCode=10034" | tee -a $RUNTIME_AREA/$repo\n'
918 <        txt += '   dumpStatus $RUNTIME_AREA/$repo\n'
919 <        txt += '   rm -f $RUNTIME_AREA/$repo \n'
920 <        txt += '   echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
921 <        txt += '   echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
922 <        ## OLI_Daniele
923 <        txt += '    if [ $middleware == OSG ]; then \n'
924 <        txt += '        echo "Remove working directory: $WORKING_DIR"\n'
925 <        txt += '        cd $RUNTIME_AREA\n'
926 <        txt += '        /bin/rm -rf $WORKING_DIR\n'
927 <        txt += '        if [ -d $WORKING_DIR ] ;then\n'
928 <        txt += '            echo "SET_CMS_ENV 10018 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after CMSSW CMSSW_0_6_1 not found on `hostname`"\n'
929 <        txt += '            echo "JOB_EXIT_STATUS = 10018"\n'
930 <        txt += '            echo "JobExitCode=10018" | tee -a $RUNTIME_AREA/$repo\n'
931 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
932 <        txt += '            rm -f $RUNTIME_AREA/$repo \n'
933 <        txt += '            echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
934 <        txt += '            echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
935 <        txt += '        fi\n'
936 <        txt += '    fi \n'
937 <        txt += '   exit 1 \n'
620 >        txt += '    echo "ERROR ==> CMSSW '+self.version+' not found on `hostname`" \n'
621 >        txt += '    job_exit_code=10034\n'
622 >        txt += '    func_exit\n'
623          txt += 'fi \n'
939        txt += 'echo "CMSSW_VERSION =  '+self.version+'"\n'
624          txt += 'cd '+self.version+'\n'
625 <        ########## FEDE FOR DBS2 ######################
626 <        txt += 'SOFTWARE_DIR=`pwd`\n'
943 <        txt += 'echo SOFTWARE_DIR=$SOFTWARE_DIR \n'
944 <        ###############################################
945 <        ### needed grep for bug in scramv1 ###
946 <        txt += scram+' runtime -sh\n'
625 >        txt += 'SOFTWARE_DIR=`pwd`; export SOFTWARE_DIR\n'
626 >        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
627          txt += 'eval `'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME`\n'
628 <        txt += 'echo $PATH\n'
629 <
628 >        txt += 'if [ $? != 0 ] ; then\n'
629 >        txt += '    echo "ERROR ==> Problem with the command: "\n'
630 >        txt += '    echo "eval \`'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME \` at `hostname`"\n'
631 >        txt += '    job_exit_code=10034\n'
632 >        txt += '    func_exit\n'
633 >        txt += 'fi \n'
634          # Handle the arguments:
635          txt += "\n"
636          txt += "## number of arguments (first argument always jobnumber)\n"
637          txt += "\n"
638 < #        txt += "narg=$#\n"
955 <        txt += "if [ $nargs -lt 2 ]\n"
638 >        txt += "if [ $nargs -lt "+str(self.argsList)+" ]\n"
639          txt += "then\n"
640 <        txt += "    echo 'SET_EXE_ENV 1 ==> ERROR Too few arguments' +$nargs+ \n"
641 <        txt += '    echo "JOB_EXIT_STATUS = 50113"\n'
642 <        txt += '    echo "JobExitCode=50113" | tee -a $RUNTIME_AREA/$repo\n'
960 <        txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
961 <        txt += '    rm -f $RUNTIME_AREA/$repo \n'
962 <        txt += '    echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
963 <        txt += '    echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
964 <        ## OLI_Daniele
965 <        txt += '    if [ $middleware == OSG ]; then \n'
966 <        txt += '        echo "Remove working directory: $WORKING_DIR"\n'
967 <        txt += '        cd $RUNTIME_AREA\n'
968 <        txt += '        /bin/rm -rf $WORKING_DIR\n'
969 <        txt += '        if [ -d $WORKING_DIR ] ;then\n'
970 <        txt += '            echo "SET_EXE_ENV 50114 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Too few arguments for CRAB job wrapper"\n'
971 <        txt += '            echo "JOB_EXIT_STATUS = 50114"\n'
972 <        txt += '            echo "JobExitCode=50114" | tee -a $RUNTIME_AREA/$repo\n'
973 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
974 <        txt += '            rm -f $RUNTIME_AREA/$repo \n'
975 <        txt += '            echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
976 <        txt += '            echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
977 <        txt += '        fi\n'
978 <        txt += '    fi \n'
979 <        txt += "    exit 1\n"
640 >        txt += "    echo 'ERROR ==> Too few arguments' +$nargs+ \n"
641 >        txt += '    job_exit_code=50113\n'
642 >        txt += "    func_exit\n"
643          txt += "fi\n"
644          txt += "\n"
645  
646          # Prepare job-specific part
647          job = common.job_list[nj]
648 <        ### FEDE FOR DBS OUTPUT PUBLICATION
649 <        if (self.datasetPath):
648 >        if (self.datasetPath):
649 >            self.primaryDataset = self.datasetPath.split("/")[1]
650 >            DataTier = self.datasetPath.split("/")[2]
651              txt += '\n'
652              txt += 'DatasetPath='+self.datasetPath+'\n'
653  
654 <            datasetpath_split = self.datasetPath.split("/")
655 <            
992 <            txt += 'PrimaryDataset='+datasetpath_split[1]+'\n'
993 <            txt += 'DataTier='+datasetpath_split[2]+'\n'
994 <            #txt += 'ProcessedDataset='+datasetpath_split[3]+'\n'
654 >            txt += 'PrimaryDataset='+self.primaryDataset +'\n'
655 >            txt += 'DataTier='+DataTier+'\n'
656              txt += 'ApplicationFamily=cmsRun\n'
657  
658          else:
659 +            self.primaryDataset = 'null'
660              txt += 'DatasetPath=MCDataTier\n'
661              txt += 'PrimaryDataset=null\n'
662              txt += 'DataTier=null\n'
1001            #txt += 'ProcessedDataset=null\n'
663              txt += 'ApplicationFamily=MCDataTier\n'
664 <        if self.pset != None: #CarlosDaniele
664 >        if self.pset != None:
665              pset = os.path.basename(job.configFilename())
666              txt += '\n'
667              txt += 'cp  $RUNTIME_AREA/'+pset+' .\n'
1007            if (self.datasetPath): # standard job
1008                #txt += 'InputFiles=$2\n'
1009                txt += 'InputFiles=${args[1]}\n'
1010                txt += 'MaxEvents=${args[2]}\n'
1011                txt += 'SkipEvents=${args[3]}\n'
1012                txt += 'echo "Inputfiles:<$InputFiles>"\n'
1013                txt += 'sed "s#{\'INPUT\'}#$InputFiles#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
1014                txt += 'echo "MaxEvents:<$MaxEvents>"\n'
1015                txt += 'sed "s#INPUTMAXEVENTS#$MaxEvents#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
1016                txt += 'echo "SkipEvents:<$SkipEvents>"\n'
1017                txt += 'sed "s#INPUTSKIPEVENTS#$SkipEvents#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
1018            else:  # pythia like job
1019                seedIndex=1
1020                if (self.firstRun):
1021                    txt += 'FirstRun=${args['+str(seedIndex)+']}\n'
1022                    txt += 'echo "FirstRun: <$FirstRun>"\n'
1023                    txt += 'sed "s#\<INPUTFIRSTRUN\>#$FirstRun#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
1024                    seedIndex=seedIndex+1
1025
1026                if (self.sourceSeed):
1027                    txt += 'Seed=${args['+str(seedIndex)+']}\n'
1028                    txt += 'sed "s#\<INPUT\>#$Seed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
1029                    seedIndex=seedIndex+1
1030                    ## the following seeds are not always present
1031                    if (self.sourceSeedVtx):
1032                        txt += 'VtxSeed=${args['+str(seedIndex)+']}\n'
1033                        txt += 'echo "VtxSeed: <$VtxSeed>"\n'
1034                        txt += 'sed "s#\<INPUTVTX\>#$VtxSeed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
1035                        seedIndex += 1
1036                    if (self.sourceSeedG4):
1037                        txt += 'G4Seed=${args['+str(seedIndex)+']}\n'
1038                        txt += 'echo "G4Seed: <$G4Seed>"\n'
1039                        txt += 'sed "s#\<INPUTG4\>#$G4Seed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
1040                        seedIndex += 1
1041                    if (self.sourceSeedMix):
1042                        txt += 'mixSeed=${args['+str(seedIndex)+']}\n'
1043                        txt += 'echo "MixSeed: <$mixSeed>"\n'
1044                        txt += 'sed "s#\<INPUTMIX\>#$mixSeed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
1045                        seedIndex += 1
1046                    pass
1047                pass
1048            txt += 'mv -f '+pset+' pset.cfg\n'
668  
669 <        if len(self.additional_inbox_files) > 0:
670 <            txt += 'if [ -e $RUNTIME_AREA/'+self.additional_tgz_name+' ] ; then\n'
671 <            txt += '  tar xzvf $RUNTIME_AREA/'+self.additional_tgz_name+'\n'
672 <            txt += 'fi\n'
1054 <            pass
669 >            txt += 'PreserveSeeds='  + ','.join(self.preserveSeeds)  + '; export PreserveSeeds\n'
670 >            txt += 'IncrementSeeds=' + ','.join(self.incrementSeeds) + '; export IncrementSeeds\n'
671 >            txt += 'echo "PreserveSeeds: <$PreserveSeeds>"\n'
672 >            txt += 'echo "IncrementSeeds:<$IncrementSeeds>"\n'
673  
674 <        if self.pset != None: #CarlosDaniele
675 <            txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
676 <        
677 <            txt += '\n'
678 <            txt += 'echo "***** cat pset.cfg *********"\n'
1061 <            txt += 'cat pset.cfg\n'
1062 <            txt += 'echo "****** end pset.cfg ********"\n'
674 >            txt += 'mv -f ' + pset + ' ' + psetName + '\n'
675 >
676 >
677 >        if self.pset != None:
678 >            # FUTURE: Can simplify for 2_1_x and higher
679              txt += '\n'
680 <            ### FEDE FOR DBS OUTPUT PUBLICATION
681 <            txt += 'PSETHASH=`EdmConfigHash < pset.cfg` \n'
680 >            if self.debug_wrapper == 1:
681 >                txt += 'echo "***** cat ' + psetName + ' *********"\n'
682 >                txt += 'cat ' + psetName + '\n'
683 >                txt += 'echo "****** end ' + psetName + ' ********"\n'
684 >                txt += '\n'
685 >                txt += 'echo "***********************" \n'
686 >                txt += 'which edmConfigHash \n'
687 >                txt += 'echo "***********************" \n'
688 >            if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
689 >                txt += 'edmConfigHash ' + psetName + ' \n'
690 >                txt += 'PSETHASH=`edmConfigHash ' + psetName + '` \n'
691 >            else:
692 >                txt += 'PSETHASH=`edmConfigHash < ' + psetName + '` \n'
693              txt += 'echo "PSETHASH = $PSETHASH" \n'
694 <            ##############
694 >            #### FEDE temporary fix for noEdm files #####
695 >            txt += 'if [ -z "$PSETHASH" ]; then \n'
696 >            txt += '   export PSETHASH=null\n'
697 >            txt += 'fi \n'
698 >            #############################################
699              txt += '\n'
1069            # txt += 'echo "***** cat pset1.cfg *********"\n'
1070            # txt += 'cat pset1.cfg\n'
1071            # txt += 'echo "****** end pset1.cfg ********"\n'
700          return txt
701  
702 <    def wsBuildExe(self, nj=0):
702 >    def wsUntarSoftware(self, nj=0):
703          """
704          Put in the script the commands to build an executable
705          or a library.
706          """
707  
708 <        txt = ""
708 >        txt = '\n#Written by cms_cmssw::wsUntarSoftware\n'
709  
710          if os.path.isfile(self.tgzNameWithPath):
711 <            txt += 'echo "tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'"\n'
712 <            txt += 'tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
711 >            txt += 'echo ">>> tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+' :" \n'
712 >            txt += 'tar zxvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
713 >            if  self.debug_wrapper==1 :
714 >                txt += 'ls -Al \n'
715              txt += 'untar_status=$? \n'
716              txt += 'if [ $untar_status -ne 0 ]; then \n'
717 <            txt += '   echo "SET_EXE 1 ==> ERROR Untarring .tgz file failed"\n'
718 <            txt += '   echo "JOB_EXIT_STATUS = $untar_status" \n'
719 <            txt += '   echo "JobExitCode=$untar_status" | tee -a $RUNTIME_AREA/$repo\n'
1090 <            txt += '   if [ $middleware == OSG ]; then \n'
1091 <            txt += '       echo "Remove working directory: $WORKING_DIR"\n'
1092 <            txt += '       cd $RUNTIME_AREA\n'
1093 <            txt += '       /bin/rm -rf $WORKING_DIR\n'
1094 <            txt += '       if [ -d $WORKING_DIR ] ;then\n'
1095 <            txt += '           echo "SET_EXE 50999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Untarring .tgz file failed"\n'
1096 <            txt += '           echo "JOB_EXIT_STATUS = 50999"\n'
1097 <            txt += '           echo "JobExitCode=50999" | tee -a $RUNTIME_AREA/$repo\n'
1098 <            txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
1099 <            txt += '           rm -f $RUNTIME_AREA/$repo \n'
1100 <            txt += '           echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1101 <            txt += '           echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1102 <            txt += '       fi\n'
1103 <            txt += '   fi \n'
1104 <            txt += '   \n'
1105 <            txt += '   exit 1 \n'
717 >            txt += '   echo "ERROR ==> Untarring .tgz file failed"\n'
718 >            txt += '   job_exit_code=$untar_status\n'
719 >            txt += '   func_exit\n'
720              txt += 'else \n'
721              txt += '   echo "Successful untar" \n'
722              txt += 'fi \n'
723              txt += '\n'
724 <            txt += 'echo "Include ProdAgentApi and PRODCOMMON in PYTHONPATH"\n'
724 >            txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
725              txt += 'if [ -z "$PYTHONPATH" ]; then\n'
726 <            #### FEDE FOR DBS OUTPUT PUBLICATION
1113 <            txt += '   export PYTHONPATH=$SOFTWARE_DIR/ProdAgentApi:$SOFTWARE_DIR/ProdCommon\n'
1114 <            #txt += '   export PYTHONPATH=`pwd`/ProdAgentApi:`pwd`/ProdCommon\n'
1115 <            #txt += '   export PYTHONPATH=ProdAgentApi\n'
726 >            txt += '   export PYTHONPATH=$RUNTIME_AREA/\n'
727              txt += 'else\n'
728 <            txt += '   export PYTHONPATH=$SOFTWARE_DIR/ProdAgentApi:$SOFTWARE_DIR/ProdCommon:${PYTHONPATH}\n'
1118 <            #txt += '   export PYTHONPATH=`pwd`/ProdAgentApi:`pwd`/ProdCommon:${PYTHONPATH}\n'
1119 <            #txt += '   export PYTHONPATH=ProdAgentApi:${PYTHONPATH}\n'
728 >            txt += '   export PYTHONPATH=$RUNTIME_AREA/:${PYTHONPATH}\n'
729              txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
1121            ###################  
730              txt += 'fi\n'
731              txt += '\n'
732  
733              pass
734 <        
734 >
735          return txt
736  
737 <    def modifySteeringCards(self, nj):
737 >    def wsBuildExe(self, nj=0):
738          """
739 <        modify the card provided by the user,
740 <        writing a new card into share dir
739 >        Put in the script the commands to build an executable
740 >        or a library.
741          """
742 <        
742 >
743 >        txt = '\n#Written by cms_cmssw::wsBuildExe\n'
744 >        txt += 'echo ">>> moving CMSSW software directories in `pwd`" \n'
745 >
746 >        txt += 'rm -r lib/ module/ \n'
747 >        txt += 'mv $RUNTIME_AREA/lib/ . \n'
748 >        txt += 'mv $RUNTIME_AREA/module/ . \n'
749 >        if self.dataExist == True:
750 >            txt += 'rm -r src/ \n'
751 >            txt += 'mv $RUNTIME_AREA/src/ . \n'
752 >        if len(self.additional_inbox_files)>0:
753 >            for file in self.additional_inbox_files:
754 >                txt += 'mv $RUNTIME_AREA/'+os.path.basename(file)+' . \n'
755 >        # txt += 'mv $RUNTIME_AREA/ProdCommon/ . \n'
756 >        # txt += 'mv $RUNTIME_AREA/IMProv/ . \n'
757 >
758 >        txt += 'echo ">>> Include $RUNTIME_AREA in PYTHONPATH:"\n'
759 >        txt += 'if [ -z "$PYTHONPATH" ]; then\n'
760 >        txt += '   export PYTHONPATH=$RUNTIME_AREA/\n'
761 >        txt += 'else\n'
762 >        txt += '   export PYTHONPATH=$RUNTIME_AREA/:${PYTHONPATH}\n'
763 >        txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
764 >        txt += 'fi\n'
765 >        txt += '\n'
766 >
767 >        return txt
768 >
769 >
770      def executableName(self):
771 <        if self.scriptExe: #CarlosDaniele
771 >        if self.scriptExe:
772              return "sh "
773          else:
774              return self.executable
775  
776      def executableArgs(self):
777 <        if self.scriptExe:#CarlosDaniele
778 <            return   self.scriptExe + " $NJob"
777 >        # FUTURE: This function tests the CMSSW version. Can be simplified as we drop support for old versions
778 >        if self.scriptExe:
779 >            return self.scriptExe + " $NJob"
780          else:
781 <            # if >= CMSSW_1_5_X, add -e
782 <            version_array = self.scram.getSWVersion().split('_')
783 <            major = 0
784 <            minor = 0
785 <            try:
1150 <                major = int(version_array[1])
1151 <                minor = int(version_array[2])
1152 <            except:
1153 <                msg = "Cannot parse CMSSW version string: " + "_".join(version_array) + " for major and minor release number!"  
1154 <                raise CrabException(msg)
1155 <            if major >= 1 and minor >= 5 :
1156 <                return " -e -p pset.cfg"
781 >            ex_args = ""
782 >            ex_args += " -j $RUNTIME_AREA/crab_fjr_$NJob.xml"
783 >            # Type of config file depends on CMSSW version
784 >            if self.CMSSW_major >= 2 :
785 >                ex_args += " -p pset.py"
786              else:
787 <                return " -p pset.cfg"
787 >                ex_args += " -p pset.cfg"
788 >            return ex_args
789  
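As an illustration, assuming self.scriptExe is empty (a plain cmsRun job), executableArgs() above returns one of two strings depending on the release:

    # Sketch of the possible return values of executableArgs() for a cmsRun job:
    args_cmssw2_and_later = " -j $RUNTIME_AREA/crab_fjr_$NJob.xml -p pset.py"    # CMSSW_major >= 2
    args_older_releases   = " -j $RUNTIME_AREA/crab_fjr_$NJob.xml -p pset.cfg"   # earlier releases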
790      def inputSandbox(self, nj):
791          """
792          Returns a list of filenames to be put in JDL input sandbox.
793          """
794          inp_box = []
1165        # # dict added to delete duplicate from input sandbox file list
1166        # seen = {}
1167        ## code
795          if os.path.isfile(self.tgzNameWithPath):
796              inp_box.append(self.tgzNameWithPath)
797 <        if os.path.isfile(self.MLtgzfile):
1171 <            inp_box.append(self.MLtgzfile)
1172 <        ## config
1173 <        if not self.pset is None:
1174 <            inp_box.append(common.work_space.pathForTgz() + 'job/' + self.configFilename())
1175 <        ## additional input files
1176 <        tgz = self.additionalInputFileTgz()
1177 <        inp_box.append(tgz)
797 >        inp_box.append(common.work_space.jobDir() + self.scriptName)
798          return inp_box
799  
800      def outputSandbox(self, nj):
# Line 1185 | Line 805 | class Cmssw(JobType):
805  
806          ## User Declared output files
807          for out in (self.output_file+self.output_file_sandbox):
808 <            n_out = nj + 1
809 <            out_box.append(self.numberFile_(out,str(n_out)))
808 >            n_out = nj + 1
809 >            out_box.append(numberFile(out,str(n_out)))
810          return out_box
811  
1192    def prepareSteeringCards(self):
1193        """
1194        Make initial modifications of the user's steering card file.
1195        """
1196        return
812  
813      def wsRenameOutput(self, nj):
814          """
815          Returns part of a job script which renames the produced files.
816          """
817  
818 <        txt = '\n'
819 <        txt += '# directory content\n'
820 <        txt += 'ls \n'
821 <
822 <        txt += 'output_exit_status=0\n'
823 <        
824 <        for fileWithSuffix in (self.output_file_sandbox):
1210 <            output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
1211 <            txt += '\n'
1212 <            txt += '# check output file\n'
1213 <            txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n'
1214 <            txt += '    mv '+fileWithSuffix+' $RUNTIME_AREA\n'
1215 <            txt += '    cp $RUNTIME_AREA/'+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1216 <            txt += 'else\n'
1217 <            txt += '    exit_status=60302\n'
1218 <            txt += '    echo "ERROR: Problem with output file '+fileWithSuffix+'"\n'
1219 <            if common.scheduler.boss_scheduler_name == 'condor_g':
1220 <                txt += '    if [ $middleware == OSG ]; then \n'
1221 <                txt += '        echo "prepare dummy output file"\n'
1222 <                txt += '        echo "Processing of job output failed" > $RUNTIME_AREA/'+output_file_num+'\n'
1223 <                txt += '    fi \n'
1224 <            txt += 'fi\n'
1225 <        
818 >        txt = '\n#Written by cms_cmssw::wsRenameOutput\n'
819 >        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
820 >        txt += 'echo ">>> current directory content:"\n'
821 >        if self.debug_wrapper==1:
822 >            txt += 'ls -Al\n'
823 >        txt += '\n'
824 >
825          for fileWithSuffix in (self.output_file):
826 <            output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
826 >            output_file_num = numberFile(fileWithSuffix, '$NJob')
827              txt += '\n'
828              txt += '# check output file\n'
829              txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n'
830 <            txt += '    mv '+fileWithSuffix+' $RUNTIME_AREA\n'
831 <            txt += '    cp $RUNTIME_AREA/'+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
830 >            if (self.copy_data == 1):  # For OSG nodes, file is in $WORKING_DIR, should not be moved to $RUNTIME_AREA
831 >                txt += '    mv '+fileWithSuffix+' '+output_file_num+'\n'
832 >                txt += '    ln -s `pwd`/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
833 >            else:
834 >                txt += '    mv '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
835 >                txt += '    ln -s $RUNTIME_AREA/'+output_file_num+' $RUNTIME_AREA/'+fileWithSuffix+'\n'
836              txt += 'else\n'
837 <            txt += '    exit_status=60302\n'
838 <            txt += '    echo "ERROR: Problem with output file '+fileWithSuffix+'"\n'
839 <            txt += '    echo "JOB_EXIT_STATUS = $exit_status"\n'
1237 <            txt += '    output_exit_status=$exit_status\n'
1238 <            if common.scheduler.boss_scheduler_name == 'condor_g':
837 >            txt += '    job_exit_code=60302\n'
838 >            txt += '    echo "WARNING: Output file '+fileWithSuffix+' not found"\n'
839 >            if common.scheduler.name().upper() == 'CONDOR_G':
840                  txt += '    if [ $middleware == OSG ]; then \n'
841                  txt += '        echo "prepare dummy output file"\n'
842                  txt += '        echo "Processing of job output failed" > $RUNTIME_AREA/'+output_file_num+'\n'
# Line 1243 | Line 844 | class Cmssw(JobType):
844              txt += 'fi\n'
845          file_list = []
846          for fileWithSuffix in (self.output_file):
847 <             file_list.append(self.numberFile_(fileWithSuffix, '$NJob'))
848 <            
849 <        txt += 'file_list="'+string.join(file_list,' ')+'"\n'
847 >             file_list.append(numberFile('$SOFTWARE_DIR/'+fileWithSuffix, '$NJob'))
848 >
849 >        txt += 'file_list="'+string.join(file_list,',')+'"\n'
850 >        txt += '\n'
851 >        txt += 'echo ">>> current directory (SOFTWARE_DIR): $SOFTWARE_DIR" \n'
852 >        txt += 'echo ">>> current directory content:"\n'
853 >        if self.debug_wrapper==1:
854 >            txt += 'ls -Al\n'
855 >        txt += '\n'
856          txt += 'cd $RUNTIME_AREA\n'
857 +        txt += 'echo ">>> current directory (RUNTIME_AREA):  $RUNTIME_AREA"\n'
858          return txt
859  
1252    def numberFile_(self, file, txt):
1253        """
1254        append _'txt' before last extension of a file
1255        """
1256        p = string.split(file,".")
1257        # take away last extension
1258        name = p[0]
1259        for x in p[1:-1]:
1260            name=name+"."+x
1261        # add "_txt"
1262        if len(p)>1:
1263            ext = p[len(p)-1]
1264            result = name + '_' + txt + "." + ext
1265        else:
1266            result = name + '_' + txt
1267        
1268        return result
1269
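The removed numberFile_() helper above appends _<txt> before the last extension of a file name; below is a minimal equivalent sketch (the numberFile() utility called elsewhere in this revision is assumed to behave the same way):

    # Sketch of the renaming behaviour of the removed numberFile_() helper:
    def number_file(filename, txt):
        parts = filename.split(".")
        if len(parts) > 1:
            return ".".join(parts[:-1]) + "_" + txt + "." + parts[-1]
        return filename + "_" + txt

    # number_file("output.root", "3") -> "output_3.root"
    # number_file("summary", "3")     -> "summary_3"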
860      def getRequirements(self, nj=[]):
861          """
862 <        return job requirements to add to jdl files
862 >        return job requirements to add to jdl files
863          """
864          req = ''
865          if self.version:
866              req='Member("VO-cms-' + \
867                   self.version + \
868                   '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
869 <        ## SL add requirement for OS version only if SL4
1280 <        #reSL4 = re.compile( r'slc4' )
1281 <        if self.executable_arch: # and reSL4.search(self.executable_arch):
869 >        if self.executable_arch:
870              req+=' && Member("VO-cms-' + \
871                   self.executable_arch + \
872                   '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
873  
874          req = req + ' && (other.GlueHostNetworkAdapterOutboundIP)'
875 +        if ( common.scheduler.name() == "glitecoll" ) or ( common.scheduler.name() == "glite"):
876 +            req += ' && other.GlueCEStateStatus == "Production" '
877  
878          return req
879  
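With a hypothetical release and architecture, getRequirements() above assembles a JDL requirement string of the following shape (the GlueCEStateStatus clause is appended only for the glite/glitecoll schedulers):

    # Sketch with hypothetical values, mirroring the string built by getRequirements():
    version = "CMSSW_2_2_3"          # hypothetical release
    arch    = "slc4_ia32_gcc345"     # hypothetical SCRAM architecture
    req  = 'Member("VO-cms-' + version + '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
    req += ' && Member("VO-cms-' + arch + '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
    req += ' && (other.GlueHostNetworkAdapterOutboundIP)'
    req += ' && other.GlueCEStateStatus == "Production" '   # glite / glitecoll only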
880      def configFilename(self):
881          """ return the config filename """
882 <        return self.name()+'.cfg'
882 >        # FUTURE: Can remove cfg mode for CMSSW >= 2_1_x
883 >        if (self.CMSSW_major >= 2 and self.CMSSW_minor >= 1) or (self.CMSSW_major >= 3):
884 >          return self.name()+'.py'
885 >        else:
886 >          return self.name()+'.cfg'
887  
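The same release gate drives configFilename() above and the psetName choice in wsSetupEnvironment(); a small sketch of it, assuming the major/minor fields compare as integers (in the patch they come from splitting the version string):

    # Sketch of the CMSSW release gate used above (assumes integer major/minor):
    def uses_python_pset(major, minor):
        return (major >= 2 and minor >= 1) or (major >= 3)

    # uses_python_pset(2, 0) -> False  (pset.cfg)
    # uses_python_pset(2, 1) -> True   (pset.py)
    # uses_python_pset(3, 0) -> True   (pset.py)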
1294    ### OLI_DANIELE
888      def wsSetupCMSOSGEnvironment_(self):
889          """
890          Returns part of a job script which prepares
891          the execution environment and which is common for all CMS jobs.
892          """
893 <        txt = '\n'
894 <        txt += '   echo "### SETUP CMS OSG  ENVIRONMENT ###"\n'
895 <        txt += '   if [ -f $GRID3_APP_DIR/cmssoft/cmsset_default.sh ] ;then\n'
896 <        txt += '      # Use $GRID3_APP_DIR/cmssoft/cmsset_default.sh to setup cms software\n'
897 <        txt += '       export SCRAM_ARCH='+self.executable_arch+'\n'
898 <        txt += '       source $GRID3_APP_DIR/cmssoft/cmsset_default.sh '+self.version+'\n'
1306 <        txt += '   elif [ -f $OSG_APP/cmssoft/cms/cmsset_default.sh ] ;then\n'
893 >        txt = '\n#Written by cms_cmssw::wsSetupCMSOSGEnvironment_\n'
894 >        txt += '    echo ">>> setup CMS OSG environment:"\n'
895 >        txt += '    echo "set SCRAM ARCH to ' + self.executable_arch + '"\n'
896 >        txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
897 >        txt += '    echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
898 >        txt += '    if [ -f $OSG_APP/cmssoft/cms/cmsset_default.sh ] ;then\n'
899          txt += '      # Use $OSG_APP/cmssoft/cms/cmsset_default.sh to setup cms software\n'
900 <        txt += '       export SCRAM_ARCH='+self.executable_arch+'\n'
901 <        txt += '       source $OSG_APP/cmssoft/cms/cmsset_default.sh '+self.version+'\n'
902 <        txt += '   else\n'
903 <        txt += '       echo "SET_CMS_ENV 10020 ==> ERROR $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
904 <        txt += '       echo "JOB_EXIT_STATUS = 10020"\n'
905 <        txt += '       echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1314 <        txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
1315 <        txt += '       rm -f $RUNTIME_AREA/$repo \n'
1316 <        txt += '       echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1317 <        txt += '       echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1318 <        txt += '       exit 1\n'
1319 <        txt += '\n'
1320 <        txt += '       echo "Remove working directory: $WORKING_DIR"\n'
1321 <        txt += '       cd $RUNTIME_AREA\n'
1322 <        txt += '       /bin/rm -rf $WORKING_DIR\n'
1323 <        txt += '       if [ -d $WORKING_DIR ] ;then\n'
1324 <        txt += '           echo "SET_CMS_ENV 10017 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1325 <        txt += '           echo "JOB_EXIT_STATUS = 10017"\n'
1326 <        txt += '           echo "JobExitCode=10017" | tee -a $RUNTIME_AREA/$repo\n'
1327 <        txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
1328 <        txt += '           rm -f $RUNTIME_AREA/$repo \n'
1329 <        txt += '           echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1330 <        txt += '           echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1331 <        txt += '       fi\n'
1332 <        txt += '\n'
1333 <        txt += '       exit 1\n'
1334 <        txt += '   fi\n'
900 >        txt += '        source $OSG_APP/cmssoft/cms/cmsset_default.sh '+self.version+'\n'
901 >        txt += '    else\n'
902 >        txt += '        echo "ERROR ==> $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
903 >        txt += '        job_exit_code=10020\n'
904 >        txt += '        func_exit\n'
905 >        txt += '    fi\n'
906          txt += '\n'
907 <        txt += '   echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
908 <        txt += '   echo " END SETUP CMS OSG  ENVIRONMENT "\n'
907 >        txt += '    echo "==> setup cms environment ok"\n'
908 >        txt += '    echo "SCRAM_ARCH = $SCRAM_ARCH"\n'
909  
910          return txt
911 <
1341 <    ### OLI_DANIELE
911 >
912      def wsSetupCMSLCGEnvironment_(self):
913          """
914          Returns part of a job script which prepares
915          the execution environment and which is common for all CMS jobs.
916          """
917 <        txt  = '   \n'
918 <        txt += '   echo " ### SETUP CMS LCG  ENVIRONMENT ### "\n'
919 <        txt += '   if [ ! $VO_CMS_SW_DIR ] ;then\n'
920 <        txt += '       echo "SET_CMS_ENV 10031 ==> ERROR CMS software dir not found on WN `hostname`"\n'
921 <        txt += '       echo "JOB_EXIT_STATUS = 10031" \n'
922 <        txt += '       echo "JobExitCode=10031" | tee -a $RUNTIME_AREA/$repo\n'
923 <        txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
924 <        txt += '       rm -f $RUNTIME_AREA/$repo \n'
925 <        txt += '       echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
926 <        txt += '       echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
927 <        txt += '       exit 1\n'
928 <        txt += '   else\n'
929 <        txt += '       echo "Sourcing environment... "\n'
930 <        txt += '       if [ ! -s $VO_CMS_SW_DIR/cmsset_default.sh ] ;then\n'
931 <        txt += '           echo "SET_CMS_ENV 10020 ==> ERROR cmsset_default.sh file not found into dir $VO_CMS_SW_DIR"\n'
932 <        txt += '           echo "JOB_EXIT_STATUS = 10020"\n'
933 <        txt += '           echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
934 <        txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
935 <        txt += '           rm -f $RUNTIME_AREA/$repo \n'
936 <        txt += '           echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
937 <        txt += '           echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
938 <        txt += '           exit 1\n'
939 <        txt += '       fi\n'
940 <        txt += '       echo "sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
941 <        txt += '       source $VO_CMS_SW_DIR/cmsset_default.sh\n'
942 <        txt += '       result=$?\n'
943 <        txt += '       if [ $result -ne 0 ]; then\n'
1374 <        txt += '           echo "SET_CMS_ENV 10032 ==> ERROR problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1375 <        txt += '           echo "JOB_EXIT_STATUS = 10032"\n'
1376 <        txt += '           echo "JobExitCode=10032" | tee -a $RUNTIME_AREA/$repo\n'
1377 <        txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
1378 <        txt += '           rm -f $RUNTIME_AREA/$repo \n'
1379 <        txt += '           echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1380 <        txt += '           echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1381 <        txt += '           exit 1\n'
1382 <        txt += '       fi\n'
1383 <        txt += '   fi\n'
1384 <        txt += '   \n'
1385 <        txt += '   echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
1386 <        txt += '   echo "### END SETUP CMS LCG ENVIRONMENT ###"\n'
917 >        txt = '\n#Written by cms_cmssw::wsSetupCMSLCGEnvironment_\n'
918 >        txt += '    echo ">>> setup CMS LCG environment:"\n'
919 >        txt += '    echo "set SCRAM ARCH and BUILD_ARCH to ' + self.executable_arch + ' ###"\n'
920 >        txt += '    export SCRAM_ARCH='+self.executable_arch+'\n'
921 >        txt += '    export BUILD_ARCH='+self.executable_arch+'\n'
922 >        txt += '    if [ ! $VO_CMS_SW_DIR ] ;then\n'
923 >        txt += '        echo "ERROR ==> CMS software dir not found on WN `hostname`"\n'
924 >        txt += '        job_exit_code=10031\n'
925 >        txt += '        func_exit\n'
926 >        txt += '    else\n'
927 >        txt += '        echo "Sourcing environment... "\n'
928 >        txt += '        if [ ! -s $VO_CMS_SW_DIR/cmsset_default.sh ] ;then\n'
929 >        txt += '            echo "ERROR ==> cmsset_default.sh file not found in dir $VO_CMS_SW_DIR"\n'
930 >        txt += '            job_exit_code=10020\n'
931 >        txt += '            func_exit\n'
932 >        txt += '        fi\n'
933 >        txt += '        echo "sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
934 >        txt += '        source $VO_CMS_SW_DIR/cmsset_default.sh\n'
935 >        txt += '        result=$?\n'
936 >        txt += '        if [ $result -ne 0 ]; then\n'
937 >        txt += '            echo "ERROR ==> problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
938 >        txt += '            job_exit_code=10032\n'
939 >        txt += '            func_exit\n'
940 >        txt += '        fi\n'
941 >        txt += '    fi\n'
942 >        txt += '    \n'
943 >        txt += '    echo "==> setup cms environment ok"\n'
944          return txt
945  
946 <    ### FEDE FOR DBS OUTPUT PUBLICATION
1390 <    def modifyReport(self, nj):
946 >    def wsModifyReport(self, nj):
947          """
948 <        insert the part of the script that modifies the FrameworkJob Report
948 >        insert the part of the script that modifies the FrameworkJob Report
949          """
950  
951 <        txt = ''
952 <        try:
953 <            publish_data = int(self.cfg_params['USER.publish_data'])          
954 <        except KeyError:
955 <            publish_data = 0
956 <        if (publish_data == 1):  
1401 <            txt += 'echo "Modify Job Report" \n'
1402 <            #txt += 'chmod a+x $RUNTIME_AREA/'+self.version+'/ProdAgentApi/FwkJobRep/ModifyJobReport.py\n'
1403 <            ################ FEDE FOR DBS2 #############################################
1404 <            txt += 'chmod a+x $SOFTWARE_DIR/ProdAgentApi/FwkJobRep/ModifyJobReport.py\n'
1405 <            #############################################################################
1406 <            #try:
1407 <            #    publish_data = int(self.cfg_params['USER.publish_data'])          
1408 <            #except KeyError:
1409 <            #    publish_data = 0
1410 <
1411 <            txt += 'if [ -z "$SE" ]; then\n'
1412 <            txt += '    SE="" \n'
1413 <            txt += 'fi \n'
1414 <            txt += 'if [ -z "$SE_PATH" ]; then\n'
1415 <            txt += '    SE_PATH="" \n'
1416 <            txt += 'fi \n'
1417 <            txt += 'echo "SE = $SE"\n'
1418 <            txt += 'echo "SE_PATH = $SE_PATH"\n'
951 >        txt = ''
952 >        publish_data = int(self.cfg_params.get('USER.publish_data',0))
953 >        #if (publish_data == 1):
954 >        if (self.copy_data == 1):
955 >            txt = '\n#Written by cms_cmssw::wsModifyReport\n'
956 >            publish_data = int(self.cfg_params.get('USER.publish_data',0))
957  
958 <        #if (publish_data == 1):  
959 <            #processedDataset = self.cfg_params['USER.processed_datasetname']
960 <            processedDataset = self.cfg_params['USER.publish_data_name']
961 <            txt += 'ProcessedDataset='+processedDataset+'\n'
1424 <            #### LFN=/store/user/<user>/processedDataset_PSETHASH
1425 <            txt += 'if [ "$SE_PATH" == "" ]; then\n'
1426 <            #### FEDE: added slash in LFN ##############
958 >
959 >            txt += 'if [ $StageOutExitStatus -eq 0 ]; then\n'
960 >            txt += '    FOR_LFN=$LFNBaseName\n'
961 >            txt += 'else\n'
962              txt += '    FOR_LFN=/copy_problems/ \n'
963 <            txt += 'else \n'
964 <            txt += '    tmp=`echo $SE_PATH | awk -F \'store\' \'{print$2}\'` \n'
965 <            #####  FEDE TO BE CHANGED, BECAUSE STORE IS HARDCODED!!!! ########
966 <            txt += '    FOR_LFN=/store$tmp \n'
967 <            txt += 'fi \n'
968 <            txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
963 >            txt += 'fi\n'
964 >
965 >            txt += 'echo ">>> Modify Job Report:" \n'
966 >            txt += 'chmod a+x $RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py\n'
967 >            txt += 'echo "SE = $SE"\n'
968 >            txt += 'echo "SE_PATH = $SE_PATH"\n'
969              txt += 'echo "FOR_LFN = $FOR_LFN" \n'
970              txt += 'echo "CMSSW_VERSION = $CMSSW_VERSION"\n\n'
971 <            #txt += 'echo "$RUNTIME_AREA/'+self.version+'/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
972 <            txt += 'echo "$SOFTWARE_DIR/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
973 <            txt += '$SOFTWARE_DIR/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
974 <            #txt += '$RUNTIME_AREA/'+self.version+'/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
975 <      
971 >
972 >
973 >            args = 'fjr $RUNTIME_AREA/crab_fjr_$NJob.xml n_job $NJob for_lfn $FOR_LFN PrimaryDataset $PrimaryDataset  ApplicationFamily $ApplicationFamily ApplicationName $executable cmssw_version $CMSSW_VERSION psethash $PSETHASH se_name $SE se_path $SE_PATH'
974 >            if (publish_data == 1):
975 >                processedDataset = self.cfg_params['USER.publish_data_name']
976 >                txt += 'ProcessedDataset='+processedDataset+'\n'
977 >                txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
978 >                args += ' UserProcessedDataset $USER-$ProcessedDataset-$PSETHASH'
979 >
980 >            txt += 'echo "$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py '+str(args)+'"\n'
981 >            txt += '$RUNTIME_AREA/ProdCommon/FwkJobRep/ModifyJobReport.py '+str(args)+'\n'
982              txt += 'modifyReport_result=$?\n'
1442            txt += 'echo modifyReport_result = $modifyReport_result\n'
983              txt += 'if [ $modifyReport_result -ne 0 ]; then\n'
984 <            txt += '    exit_status=1\n'
985 <            txt += '    echo "ERROR: Problem with ModifyJobReport"\n'
984 >            txt += '    modifyReport_result=70500\n'
985 >            txt += '    job_exit_code=$modifyReport_result\n'
986 >            txt += '    echo "ModifyReportResult=$modifyReport_result" | tee -a $RUNTIME_AREA/$repo\n'
987 >            txt += '    echo "WARNING: Problem with ModifyJobReport"\n'
988              txt += 'else\n'
989 <            txt += '    mv NewFrameworkJobReport.xml crab_fjr_$NJob.xml\n'
989 >            txt += '    mv NewFrameworkJobReport.xml $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
990              txt += 'fi\n'
1449        else:
1450            txt += 'echo "no data publication required"\n'
1451            #txt += 'ProcessedDataset=no_data_to_publish \n'
1452            #### FEDE: added slash in LFN ##############
1453            #txt += 'FOR_LFN=/local/ \n'
1454            #txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1455            #txt += 'echo "FOR_LFN = $FOR_LFN" \n'
991          return txt
992  
993 <    def cleanEnv(self):
994 <        ### OLI_DANIELE
995 <        txt = ''
996 <        txt += 'if [ $middleware == OSG ]; then\n'  
997 <        txt += '    cd $RUNTIME_AREA\n'
998 <        txt += '    echo "Remove working directory: $WORKING_DIR"\n'
999 <        txt += '    /bin/rm -rf $WORKING_DIR\n'
1000 <        txt += '    if [ -d $WORKING_DIR ] ;then\n'
1001 <        txt += '              echo "SET_EXE 60999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after cleanup of WN"\n'
1002 <        txt += '              echo "JOB_EXIT_STATUS = 60999"\n'
1003 <        txt += '              echo "JobExitCode=60999" | tee -a $RUNTIME_AREA/$repo\n'
1004 <        txt += '              dumpStatus $RUNTIME_AREA/$repo\n'
1005 <        txt += '        rm -f $RUNTIME_AREA/$repo \n'
1006 <        txt += '        echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1007 <        txt += '        echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
993 >    def wsParseFJR(self):
994 >        """
995 >        Parse the FrameworkJobReport to obtain useful infos
996 >        """
997 >        txt = '\n#Written by cms_cmssw::wsParseFJR\n'
998 >        txt += 'echo ">>> Parse FrameworkJobReport crab_fjr.xml"\n'
999 >        txt += 'if [ -s $RUNTIME_AREA/crab_fjr_$NJob.xml ]; then\n'
1000 >        txt += '    if [ -s $RUNTIME_AREA/parseCrabFjr.py ]; then\n'
1001 >        txt += '        cmd_out=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --dashboard $MonitorID,$MonitorJobID '+self.debugWrap+'`\n'
1002 >        if self.debug_wrapper==1 :
1003 >            txt += '        echo "Result of parsing the FrameworkJobReport crab_fjr.xml: $cmd_out"\n'
1004 >        txt += '        executable_exit_status=`python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --exitcode`\n'
1005 >        txt += '        if [ $executable_exit_status -eq 50115 ];then\n'
1006 >        txt += '            echo ">>> crab_fjr.xml contents: "\n'
1007 >        txt += '            cat $RUNTIME_AREA/crab_fjr_$NJob.xml\n'
1008 >        txt += '            echo "Wrong FrameworkJobReport --> does not contain useful info. ExitStatus: $executable_exit_status"\n'
1009 >        txt += '        elif [ $executable_exit_status -eq -999 ];then\n'
1010 >        txt += '            echo "ExitStatus from FrameworkJobReport not available. Using exit code of executable from command line."\n'
1011 >        txt += '        else\n'
1012 >        txt += '            echo "Extracted ExitStatus from FrameworkJobReport parsing output: $executable_exit_status"\n'
1013 >        txt += '        fi\n'
1014 >        txt += '    else\n'
1015 >        txt += '        echo "CRAB python script to parse CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1016          txt += '    fi\n'
1017 +          #### Patch to check input data reading for CMSSW16x. Hopefully we'll remove it asap
1018 +        txt += '    if [ $executable_exit_status -eq 0 ];then\n'
1019 +        txt += '        echo ">>> Executable succeeded  $executable_exit_status"\n'
1020 +        ## This can no longer work given the changes to the Job arguments
1021 +        """
1022 +        if (self.datasetPath and not (self.dataset_pu or self.useParent==1)) :
1023 +          # VERIFY PROCESSED DATA
1024 +            txt += '        echo ">>> Verify list of processed files:"\n'
1025 +            txt += '        echo $InputFiles |tr -d \'\\\\\' |tr \',\' \'\\n\'|tr -d \'"\' > input-files.txt\n'
1026 +            txt += '        python $RUNTIME_AREA/parseCrabFjr.py --input $RUNTIME_AREA/crab_fjr_$NJob.xml --lfn > processed-files.txt\n'
1027 +            txt += '        cat input-files.txt  | sort | uniq > tmp.txt\n'
1028 +            txt += '        mv tmp.txt input-files.txt\n'
1029 +            txt += '        echo "cat input-files.txt"\n'
1030 +            txt += '        echo "----------------------"\n'
1031 +            txt += '        cat input-files.txt\n'
1032 +            txt += '        cat processed-files.txt | sort | uniq > tmp.txt\n'
1033 +            txt += '        mv tmp.txt processed-files.txt\n'
1034 +            txt += '        echo "----------------------"\n'
1035 +            txt += '        echo "cat processed-files.txt"\n'
1036 +            txt += '        echo "----------------------"\n'
1037 +            txt += '        cat processed-files.txt\n'
1038 +            txt += '        echo "----------------------"\n'
1039 +            txt += '        diff -qbB input-files.txt processed-files.txt\n'
1040 +            txt += '        fileverify_status=$?\n'
1041 +            txt += '        if [ $fileverify_status -ne 0 ]; then\n'
1042 +            txt += '            executable_exit_status=30001\n'
1043 +            txt += '            echo "ERROR ==> not all input files processed"\n'
1044 +            txt += '            echo "      ==> list of processed files from crab_fjr.xml differs from list in pset.cfg"\n'
1045 +            txt += '            echo "      ==> diff input-files.txt processed-files.txt"\n'
1046 +            txt += '        fi\n'
1047 +        """
1048 +        txt += '    fi\n'
1049 +        txt += 'else\n'
1050 +        txt += '    echo "CRAB FrameworkJobReport crab_fjr.xml is not available, using exit code of executable from command line."\n'
1051          txt += 'fi\n'
1052          txt += '\n'
1053 +        txt += 'if [ $executable_exit_status -ne 0 ] && [ $executable_exit_status -ne 50115 ] && [ $executable_exit_status -ne 50117 ] && [ $executable_exit_status -ne 30001 ];then\n'
1054 +        txt += '    echo ">>> Executable failed  $executable_exit_status"\n'
1055 +        txt += '    echo "ExeExitCode=$executable_exit_status" | tee -a $RUNTIME_AREA/$repo\n'
1056 +        txt += '    echo "EXECUTABLE_EXIT_STATUS = $executable_exit_status"\n'
1057 +        txt += '    job_exit_code=$executable_exit_status\n'
1058 +        txt += '    func_exit\n'
1059 +        txt += 'fi\n\n'
1060 +        txt += 'echo "ExeExitCode=$executable_exit_status" | tee -a $RUNTIME_AREA/$repo\n'
1061 +        txt += 'echo "EXECUTABLE_EXIT_STATUS = $executable_exit_status"\n'
1062 +        txt += 'job_exit_code=$executable_exit_status\n'
1063 +
1064          return txt
1065  
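A compact sketch of the exit-status policy encoded in the generated wrapper fragment above: only a few FJR-related codes are tolerated without calling func_exit.

    # Sketch of the tolerance list used in the generated script above:
    # 0 (success), 50115 (unusable FrameworkJobReport), 50117 and 30001
    # (input-file verification) are not treated as fatal at this point.
    tolerated = (0, 50115, 50117, 30001)
    def wrapper_should_abort(executable_exit_status):
        return executable_exit_status not in tolerated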
1066      def setParam_(self, param, value):
# Line 1481 | Line 1069 | class Cmssw(JobType):
1069      def getParams(self):
1070          return self._params
1071  
1072 <    def setTaskid_(self):
1485 <        self._taskId = self.cfg_params['taskId']
1486 <        
1487 <    def getTaskid(self):
1488 <        return self._taskId
1489 <
1490 <    def uniquelist(self, old):
1491 <        """
1492 <        remove duplicates from a list
1493 <        """
1494 <        nd={}
1495 <        for e in old:
1496 <            nd[e]=0
1497 <        return nd.keys()
1498 <
1499 <
1500 <    def checkOut(self, limit):
1072 >    def outList(self,list=False):
1073          """
1074          check the dimension of the output files
1075          """
1076 <        txt = 'echo "*****************************************"\n'
1077 <        txt += 'echo "** Starting output sandbox limit check **"\n'
1506 <        txt += 'echo "*****************************************"\n'
1507 <        allOutFiles = ""
1076 >        txt = ''
1077 >        txt += 'echo ">>> list of expected files on output sandbox"\n'
1078          listOutFiles = []
1079 <        for fileOut in (self.output_file+self.output_file_sandbox):
1080 <             if fileOut.find('crab_fjr') == -1:
1081 <                 allOutFiles = allOutFiles + " " + self.numberFile_(fileOut, '$NJob')
1082 <                 listOutFiles.append(self.numberFile_(fileOut, '$NJob'))
1083 <        txt += 'echo "OUTPUT files: '+str(allOutFiles)+'";\n'
1084 <        txt += 'ls -gGhrta;\n'
1085 <        txt += 'sum=0;\n'
1086 <        txt += 'for file in '+str(allOutFiles)+' ; do\n'
1087 <        txt += '    if [ -e $file ]; then\n'
1088 <        txt += '        tt=`ls -gGrta $file | awk \'{ print $3 }\'`\n'
1089 <        txt += '        sum=`expr $sum + $tt`\n'
1090 <        txt += '    else\n'
1091 <        txt += '        echo "WARNING: output file $file not found!"\n'
1092 <        txt += '    fi\n'
1093 <        txt += 'done\n'
1094 <        txt += 'echo "Total Output dimension: $sum";\n'
1095 <        txt += 'limit='+str(limit)+';\n'
1096 <        txt += 'echo "OUTPUT FILES LIMIT SET TO: $limit";\n'
1097 <        txt += 'if [ $limit -lt $sum ]; then\n'
1098 <        txt += '    echo "WARNING: output files have to big size - something will be lost;"\n'
1099 <        txt += '    echo "         checking the output file sizes..."\n'
1530 <        """
1531 <        txt += '    dim=0;\n'
1532 <        txt += '    exclude=0;\n'
1533 <        txt += '    for files in '+str(allOutFiles)+' ; do\n'
1534 <        txt += '        sumTemp=0;\n'
1535 <        txt += '        for file2 in '+str(allOutFiles)+' ; do\n'
1536 <        txt += '            if [ $file != $file2 ]; then\n'
1537 <        txt += '                tt=`ls -gGrta $file2 | awk \'{ print $3 }\';`\n'
1538 <        txt += '                sumTemp=`expr $sumTemp + $tt`;\n'
1539 <        txt += '            fi\n'
1540 <        txt += '        done\n'
1541 <        txt += '        if [ $sumTemp -lt $limit ]; then\n'
1542 <        txt += '            if [ $dim -lt $sumTemp ]; then\n'
1543 <        txt += '                dim=$sumTemp;\n'
1544 <        txt += '                exclude=$file;\n'
1545 <        txt += '            fi\n'
1546 <        txt += '        fi\n'
1547 <        txt += '    done\n'
1548 <        txt += '    echo "Dimension calculated: $dim"; echo "File to exclude: $exclude";\n'
1549 <        """
1550 <        txt += '    tot=0;\n'
1551 <        txt += '    for file2 in '+str(allOutFiles)+' ; do\n'
1552 <        txt += '        tt=`ls -gGrta $file2 | awk \'{ print $3 }\';`\n'
1553 <        txt += '        tot=`expr $tot + $tt`;\n'
1554 <        txt += '        if [ $limit -lt $tot ]; then\n'
1555 <        txt += '            tot=`expr $tot - $tt`;\n'
1556 <        txt += '            fileLast=$file;\n'
1557 <        txt += '            fileLast=$file2;\n'
1558 <        txt += '        fi\n'
1559 <        txt += '    done\n'
1560 <        txt += '    echo "Dimension calculated: $tot"; echo "First file to exclude: $fileLast";\n'
1561 <        txt += '    flag=0;\n'    
1562 <        txt += '    for filess in '+str(allOutFiles)+' ; do\n'
1563 <        txt += '        if [ $fileLast = $filess ]; then\n'
1564 <        txt += '            flag=1;\n'
1565 <        txt += '        fi\n'
1566 <        txt += '        if [ $flag -eq 1 ]; then\n'
1567 <        txt += '            rm -f $filess;\n'
1568 <        txt += '        fi\n'
1569 <        txt += '    done\n'
1570 <        txt += '    ls -agGhrt;\n'
1571 <        txt += '    echo "WARNING: output files are too big: they cannot be put in the output_sandbox.";\n'
1572 <        txt += '    echo "JOB_EXIT_STATUS = 70000";\n'
1573 <        txt += '    exit_status=70000;\n'
1574 <        txt += 'else\n'
1575 <        txt += '    echo "Total Output dimension $sum is fine.";\n'
1576 <        txt += 'fi\n'
1577 <        txt += 'echo "*****************************************"\n'
1578 <        txt += 'echo "*** Ending output sandbox limit check ***"\n'
1579 <        txt += 'echo "*****************************************"\n'
1079 >        stdout = 'CMSSW_$NJob.stdout'
1080 >        stderr = 'CMSSW_$NJob.stderr'
1081 >        if len(self.output_file) <= 0:
1082 >            msg = "WARNING: no output file names have been defined!\n"
1083 >            msg+="\tno output files will be reported back/staged\n"
1084 >            common.logger.message(msg)
1085 >        if (self.return_data == 1):
1086 >            for file in (self.output_file+self.output_file_sandbox):
1087 >                listOutFiles.append(numberFile(file, '$NJob'))
1088 >            listOutFiles.append(stdout)
1089 >            listOutFiles.append(stderr)
1090 >        else:
1091 >            for file in (self.output_file_sandbox):
1092 >                listOutFiles.append(numberFile(file, '$NJob'))
1093 >            listOutFiles.append(stdout)
1094 >            listOutFiles.append(stderr)
1095 >        txt += 'echo "output files: '+string.join(listOutFiles,' ')+'"\n'
1096 >        txt += 'filesToCheck="'+string.join(listOutFiles,' ')+'"\n'
1097 >        txt += 'export filesToCheck\n'
1098 >
1099 >        if list : return self.output_file
1100          return txt
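For illustration only (not taken from the source): for a task configured with a single output file histo.root and return_data = 1, and assuming numberFile() from crab_util inserts the job index before the extension (histo.root -> histo_$NJob.root), the text returned by outList() would look roughly like the sketch below; files from output_file_sandbox are omitted for brevity.

# Hypothetical rendering of the text returned by outList() under the
# assumptions stated above; 'histo.root' is an example file name.
expected = (
    'echo ">>> list of expected files on output sandbox"\n'
    'echo "output files: histo_$NJob.root CMSSW_$NJob.stdout CMSSW_$NJob.stderr"\n'
    'filesToCheck="histo_$NJob.root CMSSW_$NJob.stdout CMSSW_$NJob.stderr"\n'
    'export filesToCheck\n'
)
print expected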

Diff Legend

Removed lines (old revision only)
+ Added lines (new revision only)
< Changed lines (old revision)
> Changed lines (new revision)