root/cvsroot/COMP/CRAB/python/cms_cmssw.py

Comparing COMP/CRAB/python/cms_cmssw.py (file contents):
Revision 1.1 by slacapra, Thu Apr 6 16:18:17 2006 UTC vs.
Revision 1.93 by fanzago, Tue Jun 19 17:22:21 2007 UTC

# Line 3 | Line 3 | from crab_logger import Logger
3   from crab_exceptions import *
4   from crab_util import *
5   import common
6
7 import DataDiscovery
8 import DataLocation
6   import Scram
7  
8 < import os, string, re
8 > import os, string, re, shutil, glob
9  
10   class Cmssw(JobType):
11 <    def __init__(self, cfg_params):
11 >    def __init__(self, cfg_params, ncjobs):
12          JobType.__init__(self, 'CMSSW')
13          common.logger.debug(3,'CMSSW::__init__')
14  
15 <        self.analisys_common_info = {}
15 >        # Marco.
16 >        self._params = {}
17 >        self.cfg_params = cfg_params
18 >
19 >        try:
20 >            self.MaxTarBallSize = float(self.cfg_params['EDG.maxtarballsize'])
21 >        except KeyError:
22 >            self.MaxTarBallSize = 9.5
23 >
24 >        # number of jobs requested to be created; limits job splitting
25 >        self.ncjobs = ncjobs
26  
27          log = common.logger
28          
29          self.scram = Scram.Scram(cfg_params)
23        scramArea = ''
30          self.additional_inbox_files = []
31          self.scriptExe = ''
32          self.executable = ''
33 +        self.executable_arch = self.scram.getArch()
34          self.tgz_name = 'default.tgz'
35 +        self.scriptName = 'CMSSW.sh'
36 +        self.pset = ''      #script use case Da
37 +        self.datasetPath = '' #script use case Da
38 +
39 +        # set FJR file name
40 +        self.fjrFileName = 'crab_fjr.xml'
41  
42          self.version = self.scram.getSWVersion()
43 <        common.analisys_common_info['sw_version'] = self.version
43 >        common.taskDB.setDict('codeVersion',self.version)
44 >        self.setParam_('application', self.version)
45  
46          ### collect Data cards
47 +
48 +        ## get DBS mode
49          try:
50 <            self.owner = cfg_params['CMSSW.owner']
35 <            log.debug(6, "CMSSW::CMSSW(): owner = "+self.owner)
36 <            self.dataset = cfg_params['CMSSW.dataset']
37 <            log.debug(6, "CMSSW::CMSSW(): dataset = "+self.dataset)
50 >            self.use_dbs_1 = int(self.cfg_params['CMSSW.use_dbs_1'])
51          except KeyError:
52 <            msg = "Error: owner and/or dataset not defined "
52 >            self.use_dbs_1 = 0
53 >            
54 >        try:
55 >            tmp =  cfg_params['CMSSW.datasetpath']
56 >            log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
57 >            if string.lower(tmp)=='none':
58 >                self.datasetPath = None
59 >                self.selectNoInput = 1
60 >            else:
61 >                self.datasetPath = tmp
62 >                self.selectNoInput = 0
63 >        except KeyError:
64 >            msg = "Error: datasetpath not defined "  
65              raise CrabException(msg)
66  
67 +        # ML monitoring
68 +        # split dataset path style: /PreProdR3Minbias/SIM/GEN-SIM
69 +        if not self.datasetPath:
70 +            self.setParam_('dataset', 'None')
71 +            self.setParam_('owner', 'None')
72 +        else:
73 +            try:
74 +                datasetpath_split = self.datasetPath.split("/")
75 +                # standard style
76 +                if self.use_dbs_1 == 1 :
77 +                    self.setParam_('dataset', datasetpath_split[1])
78 +                    self.setParam_('owner', datasetpath_split[-1])
79 +                else:
80 +                    self.setParam_('dataset', datasetpath_split[1])
81 +                    self.setParam_('owner', datasetpath_split[2])
82 +            except:
83 +                self.setParam_('dataset', self.datasetPath)
84 +                self.setParam_('owner', self.datasetPath)
85 +                
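
For illustration, this is the indexing used by the ML-monitoring branch above, assuming the example path given in the comment:

    # Illustrative only: how the dataset path is split for ML monitoring above.
    datasetpath_split = "/PreProdR3Minbias/SIM/GEN-SIM".split("/")
    # -> ['', 'PreProdR3Minbias', 'SIM', 'GEN-SIM']
    # use_dbs_1 == 1 : dataset = datasetpath_split[1]  ('PreProdR3Minbias')
    #                  owner   = datasetpath_split[-1] ('GEN-SIM')
    # otherwise      : dataset = datasetpath_split[1]  ('PreProdR3Minbias')
    #                  owner   = datasetpath_split[2]  ('SIM')
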
86 +        self.setTaskid_()
87 +        self.setParam_('taskId', self.cfg_params['taskId'])
88 +
89          self.dataTiers = []
43        try:
44            tmpDataTiers = string.split(cfg_params['CMSSW.data_tier'],',')
45            for tmp in tmpDataTiers:
46                tmp=string.strip(tmp)
47                self.dataTiers.append(tmp)
48                pass
49            pass
50        except KeyError:
51            pass
52        log.debug(6, "Cmssw::Cmssw(): dataTiers = "+str(self.dataTiers))
90  
91          ## now the application
92          try:
93              self.executable = cfg_params['CMSSW.executable']
94 +            self.setParam_('exe', self.executable)
95              log.debug(6, "CMSSW::CMSSW(): executable = "+self.executable)
96              msg = "Default executable cmsRun overridden. Switch to " + self.executable
97              log.debug(3,msg)
98          except KeyError:
99              self.executable = 'cmsRun'
100 +            self.setParam_('exe', self.executable)
101              msg = "User executable not defined. Use cmsRun"
102              log.debug(3,msg)
103              pass
# Line 66 | Line 105 | class Cmssw(JobType):
105          try:
106              self.pset = cfg_params['CMSSW.pset']
107              log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
108 <            if (not os.path.exists(self.pset)):
109 <                raise CrabException("User defined PSet file "+self.pset+" does not exist")
108 >            if self.pset.lower() != 'none' :
109 >                if (not os.path.exists(self.pset)):
110 >                    raise CrabException("User defined PSet file "+self.pset+" does not exist")
111 >            else:
112 >                self.pset = None
113          except KeyError:
114              raise CrabException("PSet file missing. Cannot run cmsRun ")
115  
116          # output files
117 +        ## stuff which must be returned always via sandbox
118 +        self.output_file_sandbox = []
119 +
120 +        # add fjr report by default via sandbox
121 +        self.output_file_sandbox.append(self.fjrFileName)
122 +
123 +        # other output files to be returned via sandbox or copied to SE
124          try:
125              self.output_file = []
77
126              tmp = cfg_params['CMSSW.output_file']
127              if tmp != '':
128                  tmpOutFiles = string.split(cfg_params['CMSSW.output_file'],',')
# Line 83 | Line 131 | class Cmssw(JobType):
131                      tmp=string.strip(tmp)
132                      self.output_file.append(tmp)
133                      pass
86
134              else:
135 <                log.message("No output file defined: only stdout/err will be available")
135 >                log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
136                  pass
137              pass
138          except KeyError:
139 <            log.message("No output file defined: only stdout/err will be available")
139 >            log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
140              pass
141  
142          # script_exe file as additional file in inputSandbox
143          try:
144 <           self.scriptExe = cfg_params['CMSSW.script_exe']
145 <           self.additional_inbox_files.append(self.scriptExe)
144 >            self.scriptExe = cfg_params['USER.script_exe']
145 >            if self.scriptExe != '':
146 >               if not os.path.isfile(self.scriptExe):
147 >                  msg ="ERROR. file "+self.scriptExe+" not found"
148 >                  raise CrabException(msg)
149 >               self.additional_inbox_files.append(string.strip(self.scriptExe))
150          except KeyError:
151 <           pass
152 <        if self.scriptExe != '':
153 <           if os.path.isfile(self.scriptExe):
154 <              pass
155 <           else:
156 <              log.message("WARNING. file "+self.scriptExe+" not found")
157 <              sys.exit()
107 <                  
151 >            self.scriptExe = ''
152 >
153 >        #CarlosDaniele
154 >        if self.datasetPath == None and self.pset == None and self.scriptExe == '' :
155 >           msg ="Error. script_exe  not defined"
156 >           raise CrabException(msg)
157 >
158          ## additional input files
159          try:
160 <            tmpAddFiles = string.split(cfg_params['CMSSW.additional_input_files'],',')
160 >            tmpAddFiles = string.split(cfg_params['USER.additional_input_files'],',')
161              for tmp in tmpAddFiles:
162 <                tmp=string.strip(tmp)
163 <                self.additional_inbox_files.append(tmp)
162 >                tmp = string.strip(tmp)
163 >                dirname = ''
164 >                if not tmp[0]=="/": dirname = "."
165 >                files = []
166 >                if string.find(tmp,"*")>-1:
167 >                    files = glob.glob(os.path.join(dirname, tmp))
168 >                    if len(files)==0:
169 >                        raise CrabException("No additional input file found with this pattern: "+tmp)
170 >                else:
171 >                    files.append(tmp)
172 >                for file in files:
173 >                    if not os.path.exists(file):
174 >                        raise CrabException("Additional input file not found: "+file)
175 >                    pass
176 >                    fname = string.split(file, '/')[-1]
177 >                    storedFile = common.work_space.pathForTgz()+'share/'+fname
178 >                    shutil.copyfile(file, storedFile)
179 >                    self.additional_inbox_files.append(string.strip(storedFile))
180                  pass
181              pass
182 +            common.logger.debug(5,"Additional input files: "+str(self.additional_inbox_files))
183          except KeyError:
184              pass
185  
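
The wildcard handling above follows a standard glob-and-copy pattern; here is a minimal self-contained sketch, with a hypothetical helper name and paths, and RuntimeError standing in for CrabException:

    import glob, os, shutil

    def collect_additional_files(patterns, share_dir):
        # Sketch of the additional_input_files expansion above (hypothetical helper).
        collected = []
        for pattern in patterns:
            pattern = pattern.strip()
            # relative patterns are resolved against the current directory
            dirname = ''
            if not pattern.startswith('/'):
                dirname = '.'
            if '*' in pattern:
                matches = glob.glob(os.path.join(dirname, pattern))
            else:
                matches = [pattern]
            if not matches:
                raise RuntimeError("No additional input file found with this pattern: " + pattern)
            for path in matches:
                if not os.path.exists(path):
                    raise RuntimeError("Additional input file not found: " + path)
                stored = os.path.join(share_dir, os.path.basename(path))
                shutil.copyfile(path, stored)   # staged into the share area, as above
                collected.append(stored)
        return collected
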
186 +        # files per job
187 +        try:
188 +            if (cfg_params['CMSSW.files_per_jobs']):
189 +                raise CrabException("files_per_jobs no longer supported.  Quitting.")
190 +        except KeyError:
191 +            pass
192 +
193 +        ## Events per job
194 +        try:
195 +            self.eventsPerJob =int( cfg_params['CMSSW.events_per_job'])
196 +            self.selectEventsPerJob = 1
197 +        except KeyError:
198 +            self.eventsPerJob = -1
199 +            self.selectEventsPerJob = 0
200 +    
201 +        ## number of jobs
202 +        try:
203 +            self.theNumberOfJobs =int( cfg_params['CMSSW.number_of_jobs'])
204 +            self.selectNumberOfJobs = 1
205 +        except KeyError:
206 +            self.theNumberOfJobs = 0
207 +            self.selectNumberOfJobs = 0
208 +
209          try:
210              self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
211 +            self.selectTotalNumberEvents = 1
212          except KeyError:
213 <            msg = 'Must define total_number_of_events and job_number_of_events'
214 <            raise CrabException(msg)
215 <            
216 < #Marco: FirstEvent is nolonger used inside PSet
217 < #        try:
218 < #            self.first = int(cfg_params['CMSSW.first_event'])
219 < #        except KeyError:
220 < #            self.first = 0
221 < #            pass
222 < #        log.debug(6, "Orca::Orca(): total number of events = "+`self.total_number_of_events`)
223 <        #log.debug(6, "Orca::Orca(): events per job = "+`self.job_number_of_events`)
224 < #        log.debug(6, "Orca::Orca(): first event = "+`self.first`)
225 <        
226 <        CEBlackList = []
227 <        try:
137 <            tmpBad = string.split(cfg_params['EDG.ce_black_list'],',')
138 <            for tmp in tmpBad:
139 <                tmp=string.strip(tmp)
140 <                CEBlackList.append(tmp)
213 >            self.total_number_of_events = 0
214 >            self.selectTotalNumberEvents = 0
215 >
216 >        if self.pset != None: #CarlosDaniele
217 >             if ( (self.selectTotalNumberEvents + self.selectEventsPerJob + self.selectNumberOfJobs) != 2 ):
218 >                 msg = 'Must define exactly two of total_number_of_events, events_per_job, or number_of_jobs.'
219 >                 raise CrabException(msg)
220 >        else:
221 >             if (self.selectNumberOfJobs == 0):
222 >                 msg = 'Must specify  number_of_jobs.'
223 >                 raise CrabException(msg)
224 >
225 >        ## source seed for pythia
226 >        try:
227 >            self.sourceSeed = int(cfg_params['CMSSW.pythia_seed'])
228          except KeyError:
229 <            pass
229 >            self.sourceSeed = None
230 >            common.logger.debug(5,"No seed given")
231  
232 <        self.reCEBlackList=[]
233 <        for bad in CEBlackList:
146 <            self.reCEBlackList.append(re.compile( bad ))
147 <
148 <        common.logger.debug(5,'CEBlackList: '+str(CEBlackList))
149 <
150 <        CEWhiteList = []
151 <        try:
152 <            tmpGood = string.split(cfg_params['EDG.ce_white_list'],',')
153 <            #tmpGood = ['cern']
154 <            for tmp in tmpGood:
155 <                tmp=string.strip(tmp)
156 <                #if (tmp == 'cnaf'): tmp = 'webserver' ########## warning: temp. patch
157 <                CEWhiteList.append(tmp)
232 >        try:
233 >            self.sourceSeedVtx = int(cfg_params['CMSSW.vtx_seed'])
234          except KeyError:
235 <            pass
235 >            self.sourceSeedVtx = None
236 >            common.logger.debug(5,"No vertex seed given")
237 >
238 >        try:
239 >            self.sourceSeedG4 = int(cfg_params['CMSSW.g4_seed'])
240 >        except KeyError:
241 >            self.sourceSeedG4 = None
242 >            common.logger.debug(5,"No g4 sim hits seed given")
243  
244 <        #print 'CEWhiteList: ',CEWhiteList
245 <        self.reCEWhiteList=[]
246 <        for Good in CEWhiteList:
247 <            self.reCEWhiteList.append(re.compile( Good ))
244 >        try:
245 >            self.sourceSeedMix = int(cfg_params['CMSSW.mix_seed'])
246 >        except KeyError:
247 >            self.sourceSeedMix = None
248 >            common.logger.debug(5,"No mix seed given")
249  
250 <        common.logger.debug(5,'CEWhiteList: '+str(CEWhiteList))
250 >        try:
251 >            self.firstRun = int(cfg_params['CMSSW.first_run'])
252 >        except KeyError:
253 >            self.firstRun = None
254 >            common.logger.debug(5,"No first run given")
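
Each optional parameter above is read with the same try/except-with-default idiom; a compact sketch of that pattern, using a hypothetical helper name:

    def get_int_or_none(cfg_params, key):
        # Sketch of the optional-parameter reads above (hypothetical helper).
        try:
            return int(cfg_params[key])
        except KeyError:
            return None   # logged as "No ... given" in the code above

    # e.g. sourceSeed = get_int_or_none(cfg_params, 'CMSSW.pythia_seed')
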
255 >        if self.pset != None: #CarlosDaniele
256 >            import PsetManipulator  
257 >            PsetEdit = PsetManipulator.PsetManipulator(self.pset) #Daniele Pset
258  
259          #DBSDLS-start
260          ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
261          self.maxEvents=0  # max events available   ( --> check the requested nb. of evts in Creator.py)
262          self.DBSPaths={}  # all dbs paths requested ( --> input to the site local discovery script)
263 +        self.jobDestination=[]  # Site destination(s) for each job (list of lists)
264          ## Perform the data location and discovery (based on DBS/DLS)
265 <        self.DataDiscoveryAndLocation(cfg_params)
265 >        ## SL: Don't if NONE is specified as input (pythia use case)
266 >        blockSites = {}
267 >        if self.datasetPath:
268 >            blockSites = self.DataDiscoveryAndLocation(cfg_params)
269          #DBSDLS-end          
270  
271          self.tgzNameWithPath = self.getTarBall(self.executable)
272 +    
273 +        ## Select Splitting
274 +        if self.selectNoInput:
275 +            if self.pset == None: #CarlosDaniele
276 +                self.jobSplittingForScript()
277 +            else:
278 +                self.jobSplittingNoInput()
279 +        else:
280 +            self.jobSplittingByBlocks(blockSites)
281 +
282 +        # modify Pset
283 +        if self.pset != None: #CarlosDaniele
284 +            try:
285 +                if (self.datasetPath): # standard job
286 +                    # allow processing a fraction of events in a file
287 +                    PsetEdit.inputModule("INPUT")
288 +                    PsetEdit.maxEvent("INPUTMAXEVENTS")
289 +                    PsetEdit.skipEvent("INPUTSKIPEVENTS")
290 +                else:  # pythia like job
291 +                    PsetEdit.maxEvent(self.eventsPerJob)
292 +                    if (self.firstRun):
293 +                        PsetEdit.pythiaFirstRun("INPUTFIRSTRUN")  #First Run
294 +                    if (self.sourceSeed) :
295 +                        PsetEdit.pythiaSeed("INPUT")
296 +                        if (self.sourceSeedVtx) :
297 +                            PsetEdit.vtxSeed("INPUTVTX")
298 +                        if (self.sourceSeedG4) :
299 +                            PsetEdit.g4Seed("INPUTG4")
300 +                        if (self.sourceSeedMix) :
301 +                            PsetEdit.mixSeed("INPUTMIX")
302 +                # add FrameworkJobReport to parameter-set
303 +                PsetEdit.addCrabFJR(self.fjrFileName)
304 +                PsetEdit.psetWriter(self.configFilename())
305 +            except:
306 +                msg='Error while manipulating ParameterSet: exiting...'
307 +                raise CrabException(msg)
308  
309      def DataDiscoveryAndLocation(self, cfg_params):
310  
311 <        fun = "CMSSW::DataDiscoveryAndLocation()"
311 >        import DataDiscovery
312 >        import DataDiscovery_DBS2
313 >        import DataLocation
314 >        common.logger.debug(10,"CMSSW::DataDiscoveryAndLocation()")
315 >
316 >        datasetPath=self.datasetPath
317  
318          ## Contact the DBS
319 +        common.logger.message("Contacting Data Discovery Services ...")
320          try:
321 <            self.pubdata=DataDiscovery.DataDiscovery(self.owner,
322 <                                                     self.dataset,
323 <                                                     self.dataTiers,
324 <                                                     cfg_params)
321 >
322 >            if self.use_dbs_1 == 1 :
323 >                self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params)
324 >            else :
325 >                self.pubdata=DataDiscovery_DBS2.DataDiscovery_DBS2(datasetPath, cfg_params)
326              self.pubdata.fetchDBSInfo()
327  
328          except DataDiscovery.NotExistingDatasetError, ex :
329              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
330              raise CrabException(msg)
193
331          except DataDiscovery.NoDataTierinProvenanceError, ex :
332              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
333              raise CrabException(msg)
334          except DataDiscovery.DataDiscoveryError, ex:
335 <            msg = 'ERROR ***: failed Data Discovery in DBS  %s'%ex.getErrorMessage()
335 >            msg = 'ERROR ***: failed Data Discovery in DBS :  %s'%ex.getErrorMessage()
336 >            raise CrabException(msg)
337 >        except DataDiscovery_DBS2.NotExistingDatasetError_DBS2, ex :
338 >            msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
339 >            raise CrabException(msg)
340 >        except DataDiscovery_DBS2.NoDataTierinProvenanceError_DBS2, ex :
341 >            msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
342 >            raise CrabException(msg)
343 >        except DataDiscovery_DBS2.DataDiscoveryError_DBS2, ex:
344 >            msg = 'ERROR ***: failed Data Discovery in DBS :  %s'%ex.getErrorMessage()
345              raise CrabException(msg)
346  
347 <        ## get list of all required data in the form of dbs paths  (dbs path = /dataset/datatier/owner)
348 <        self.DBSPaths=self.pubdata.getDBSPaths()
349 <        common.logger.message("Required data are : ")
204 <        for path in self.DBSPaths:
205 <            common.logger.message(" --> "+path )
347 >        self.filesbyblock=self.pubdata.getFiles()
348 >        self.eventsbyblock=self.pubdata.getEventsPerBlock()
349 >        self.eventsbyfile=self.pubdata.getEventsPerFile()
350  
351          ## get max number of events
208        common.logger.debug(10,"number of events for primary fileblocks %i"%self.pubdata.getMaxEvents())
352          self.maxEvents=self.pubdata.getMaxEvents() ##  self.maxEvents used in Creator.py
210        common.logger.message("\nThe number of available events is %s"%self.maxEvents)
211
212        ## get fileblocks corresponding to the required data
213        fb=self.pubdata.getFileBlocks()
214        common.logger.debug(5,"fileblocks are %s"%fb)
353  
354          ## Contact the DLS and build a list of sites hosting the fileblocks
355          try:
356 <            dataloc=DataLocation.DataLocation(self.pubdata.getFileBlocks(),cfg_params)
356 >            dataloc=DataLocation.DataLocation(self.filesbyblock.keys(),cfg_params)
357              dataloc.fetchDLSInfo()
358          except DataLocation.DataLocationError , ex:
359              msg = 'ERROR ***: failed Data Location in DLS \n %s '%ex.getErrorMessage()
360              raise CrabException(msg)
361          
224        allsites=dataloc.getSites()
225        common.logger.debug(5,"sites are %s"%allsites)
226        sites=self.checkBlackList(allsites)
227        common.logger.debug(5,"sites are (after black list) %s"%sites)
228        sites=self.checkWhiteList(sites)
229        common.logger.debug(5,"sites are (after white list) %s"%sites)
362  
363 <        if len(sites)==0:
364 <            msg = 'No sites hosting all the needed data! Exiting... '
365 <            raise CrabException(msg)
366 <        common.logger.message("List of Sites hosting the data : "+str(sites))
367 <        common.logger.debug(6, "List of Sites: "+str(sites))
368 <        common.analisys_common_info['sites']=sites    ## used in SchedulerEdg.py in createSchScript
369 <        return
363 >        sites = dataloc.getSites()
364 >        allSites = []
365 >        listSites = sites.values()
366 >        for listSite in listSites:
367 >            for oneSite in listSite:
368 >                allSites.append(oneSite)
369 >        allSites = self.uniquelist(allSites)
370 >
371 >        # screen output
372 >        common.logger.message("Requested dataset: " + datasetPath + " has " + str(self.maxEvents) + " events in " + str(len(self.filesbyblock.keys())) + " blocks.\n")
373 >
374 >        return sites
375 >    
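
The per-block site lists gathered above are also flattened into a unique list; a small sketch of that step, with made-up block and site names:

    # Illustrative only: flattening block -> site lists, mirroring the allSites loop above.
    sites = {'block-A': ['site1.example.org', 'site2.example.org'],
             'block-B': ['site2.example.org']}
    allSites = []
    for listSite in sites.values():
        for oneSite in listSite:
            allSites.append(oneSite)
    allSites = list(set(allSites))   # stand-in for self.uniquelist()
    # -> unique site names, order not guaranteed
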
376 >    def jobSplittingByBlocks(self, blockSites):
377 >        """
378 >        Perform job splitting. Jobs run over an integer number of files
379 >        and no more than one block.
380 >        ARGUMENT: blockSites: dictionary with blocks as keys and list of host sites as values
381 >        REQUIRES: self.selectTotalNumberEvents, self.selectEventsPerJob, self.selectNumberOfJobs,
382 >                  self.total_number_of_events, self.eventsPerJob, self.theNumberOfJobs,
383 >                  self.maxEvents, self.filesbyblock
384 >        SETS: self.jobDestination - Site destination(s) for each job (a list of lists)
385 >              self.total_number_of_jobs - Total # of jobs
386 >              self.list_of_args - File(s) job will run on (a list of lists)
387 >        """
388 >
389 >        # ---- Handle the possible job splitting configurations ---- #
390 >        if (self.selectTotalNumberEvents):
391 >            totalEventsRequested = self.total_number_of_events
392 >        if (self.selectEventsPerJob):
393 >            eventsPerJobRequested = self.eventsPerJob
394 >            if (self.selectNumberOfJobs):
395 >                totalEventsRequested = self.theNumberOfJobs * self.eventsPerJob
396 >
397 >        # If user requested all the events in the dataset
398 >        if (totalEventsRequested == -1):
399 >            eventsRemaining=self.maxEvents
400 >        # If user requested more events than are in the dataset
401 >        elif (totalEventsRequested > self.maxEvents):
402 >            eventsRemaining = self.maxEvents
403 >            common.logger.message("Requested "+str(self.total_number_of_events)+ " events, but only "+str(self.maxEvents)+" events are available.")
404 >        # If user requested fewer events than are in the dataset
405 >        else:
406 >            eventsRemaining = totalEventsRequested
407 >
408 >        # If user requested more events per job than are in the dataset
409 >        if (self.selectEventsPerJob and eventsPerJobRequested > self.maxEvents):
410 >            eventsPerJobRequested = self.maxEvents
411 >
412 >        # For user info at end
413 >        totalEventCount = 0
414 >
415 >        if (self.selectTotalNumberEvents and self.selectNumberOfJobs):
416 >            eventsPerJobRequested = int(eventsRemaining/self.theNumberOfJobs)
417 >
418 >        if (self.selectNumberOfJobs):
419 >            common.logger.message("May not create the exact number_of_jobs requested.")
420 >
421 >        if ( self.ncjobs == 'all' ) :
422 >            totalNumberOfJobs = 999999999
423 >        else :
424 >            totalNumberOfJobs = self.ncjobs
425 >            
426 >
427 >        blocks = blockSites.keys()
428 >        blockCount = 0
429 >        # Backup variable in case self.maxEvents counted events in a non-included block
430 >        numBlocksInDataset = len(blocks)
431 >
432 >        jobCount = 0
433 >        list_of_lists = []
434 >
435 >        # list tracking which jobs belong to which block
436 >        jobsOfBlock = {}
437 >
438 >        # ---- Iterate over the blocks in the dataset until ---- #
439 >        # ---- we've met the requested total # of events    ---- #
440 >        while ( (eventsRemaining > 0) and (blockCount < numBlocksInDataset) and (jobCount < totalNumberOfJobs)):
441 >            block = blocks[blockCount]
442 >            blockCount += 1
443 >            
444 >            if self.eventsbyblock.has_key(block) :
445 >                numEventsInBlock = self.eventsbyblock[block]
446 >                common.logger.debug(5,'Events in Block File '+str(numEventsInBlock))
447 >            
448 >                files = self.filesbyblock[block]
449 >                numFilesInBlock = len(files)
450 >                if (numFilesInBlock <= 0):
451 >                    continue
452 >                fileCount = 0
453 >
454 >                # ---- New block => New job ---- #
455 >                parString = "\\{"
456 >                # counter for number of events in files currently worked on
457 >                filesEventCount = 0
458 >                # flag if next while loop should touch new file
459 >                newFile = 1
460 >                # job event counter
461 >                jobSkipEventCount = 0
462 >            
463 >                # ---- Iterate over the files in the block until we've met the requested ---- #
464 >                # ---- total # of events or we've gone over all the files in this block  ---- #
465 >                while ( (eventsRemaining > 0) and (fileCount < numFilesInBlock) and (jobCount < totalNumberOfJobs) ):
466 >                    file = files[fileCount]
467 >                    if newFile :
468 >                        try:
469 >                            numEventsInFile = self.eventsbyfile[file]
470 >                            common.logger.debug(6, "File "+str(file)+" has "+str(numEventsInFile)+" events")
471 >                            # increase filesEventCount
472 >                            filesEventCount += numEventsInFile
473 >                            # Add file to current job
474 >                            parString += '\\\"' + file + '\\\"\,'
475 >                            newFile = 0
476 >                        except KeyError:
477 >                            common.logger.message("File "+str(file)+" has unknown number of events: skipping")
478 >                        
479 >
480 >                    # if fewer events remain in the file than eventsPerJobRequested
481 >                    if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested ) :
482 >                        # if last file in block
483 >                        if ( fileCount == numFilesInBlock-1 ) :
484 >                            # end job using last file, use remaining events in block
485 >                            # close job and touch new file
486 >                            fullString = parString[:-2]
487 >                            fullString += '\\}'
488 >                            list_of_lists.append([fullString,str(-1),str(jobSkipEventCount)])
489 >                            common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(filesEventCount - jobSkipEventCount)+" events (last file in block).")
490 >                            self.jobDestination.append(blockSites[block])
491 >                            common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
492 >                            # fill jobs of block dictionary
493 >                            if block in jobsOfBlock.keys() :
494 >                                jobsOfBlock[block].append(jobCount+1)
495 >                            else:
496 >                                jobsOfBlock[block] = [jobCount+1]
497 >                            # reset counter
498 >                            jobCount = jobCount + 1
499 >                            totalEventCount = totalEventCount + filesEventCount - jobSkipEventCount
500 >                            eventsRemaining = eventsRemaining - filesEventCount + jobSkipEventCount
501 >                            jobSkipEventCount = 0
502 >                            # reset file
503 >                            parString = "\\{"
504 >                            filesEventCount = 0
505 >                            newFile = 1
506 >                            fileCount += 1
507 >                        else :
508 >                            # go to next file
509 >                            newFile = 1
510 >                            fileCount += 1
511 >                    # if events in file equal to eventsPerJobRequested
512 >                    elif ( filesEventCount - jobSkipEventCount == eventsPerJobRequested ) :
513 >                        # close job and touch new file
514 >                        fullString = parString[:-2]
515 >                        fullString += '\\}'
516 >                        list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
517 >                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
518 >                        self.jobDestination.append(blockSites[block])
519 >                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
520 >                        if block in jobsOfBlock.keys() :
521 >                            jobsOfBlock[block].append(jobCount+1)
522 >                        else:
523 >                            jobsOfBlock[block] = [jobCount+1]
524 >                        # reset counter
525 >                        jobCount = jobCount + 1
526 >                        totalEventCount = totalEventCount + eventsPerJobRequested
527 >                        eventsRemaining = eventsRemaining - eventsPerJobRequested
528 >                        jobSkipEventCount = 0
529 >                        # reset file
530 >                        parString = "\\{"
531 >                        filesEventCount = 0
532 >                        newFile = 1
533 >                        fileCount += 1
534 >                        
535 >                    # if more events in file remain than eventsPerJobRequested
536 >                    else :
537 >                        # close job but don't touch new file
538 >                        fullString = parString[:-2]
539 >                        fullString += '\\}'
540 >                        list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
541 >                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
542 >                        self.jobDestination.append(blockSites[block])
543 >                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
544 >                        if block in jobsOfBlock.keys() :
545 >                            jobsOfBlock[block].append(jobCount+1)
546 >                        else:
547 >                            jobsOfBlock[block] = [jobCount+1]
548 >                        # increase counter
549 >                        jobCount = jobCount + 1
550 >                        totalEventCount = totalEventCount + eventsPerJobRequested
551 >                        eventsRemaining = eventsRemaining - eventsPerJobRequested
552 >                        # calculate skip events for last file
553 >                        # use filesEventCount (contains several files), jobSkipEventCount and eventsPerJobRequested
554 >                        jobSkipEventCount = eventsPerJobRequested - (filesEventCount - jobSkipEventCount - self.eventsbyfile[file])
555 >                        # remove all but the last file
556 >                        filesEventCount = self.eventsbyfile[file]
557 >                        parString = "\\{"
558 >                        parString += '\\\"' + file + '\\\"\,'
559 >                    pass # END if
560 >                pass # END while (iterate over files in the block)
561 >        pass # END while (iterate over blocks in the dataset)
562 >        self.ncjobs = self.total_number_of_jobs = jobCount
563 >        if (eventsRemaining > 0 and jobCount < totalNumberOfJobs ):
564 >            common.logger.message("Could not run on all requested events because some blocks are not hosted at allowed sites.")
565 >        common.logger.message(str(jobCount)+" job(s) can run on "+str(totalEventCount)+" events.\n")
566          
567 <    def checkBlackList(self, allSites):
568 <        if len(self.reCEBlackList)==0: return allSites
569 <        sites = []
570 <        for site in allSites:
571 <            common.logger.debug(10,'Site '+site)
572 <            good=1
573 <            for re in self.reCEBlackList:
574 <                if re.search(site):
575 <                    common.logger.message('CE in black list, skipping site '+site)
576 <                    good=0
567 >        # screen output
568 >        screenOutput = "List of jobs and available destination sites:\n\n"
569 >
570 >        blockCounter = 0
571 >        for block in jobsOfBlock.keys():
572 >            blockCounter += 1
573 >            screenOutput += "Block %5i: jobs %20s: sites: %s\n" % (blockCounter,spanRanges(jobsOfBlock[block]),','.join(blockSites[block]))
574 >
575 >        common.logger.message(screenOutput)
576 >
577 >        self.list_of_args = list_of_lists
578 >        return
579 >
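
Each entry appended to list_of_lists above carries three strings: the escaped file list, the maximum number of events for the job, and the number of events to skip in the first file. A hypothetical example of one such entry:

    # Hypothetical example of one self.list_of_args entry built by jobSplittingByBlocks.
    # The first element is an escaped file list whose runtime value looks like
    #   \{\"/store/file1.root\"\,\"/store/file2.root\"\}
    # and is later substituted into the pset by the job wrapper.
    example_job_args = [
        '\\{\\"/store/file1.root\\"\\,\\"/store/file2.root\\"\\}',  # escaped file list
        '500',   # MaxEvents ('-1' is used when the job ends on the last file of a block)
        '120',   # SkipEvents in the first file
    ]
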
580 >    def jobSplittingNoInput(self):
581 >        """
582 >        Perform job splitting based on the number of events per job
583 >        """
584 >        common.logger.debug(5,'Splitting per events')
585 >        common.logger.message('Required '+str(self.eventsPerJob)+' events per job ')
586 >        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
587 >        common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
588 >
589 >        if (self.total_number_of_events < 0):
590 >            msg='Cannot split jobs per Events with "-1" as total number of events'
591 >            raise CrabException(msg)
592 >
593 >        if (self.selectEventsPerJob):
594 >            if (self.selectTotalNumberEvents):
595 >                self.total_number_of_jobs = int(self.total_number_of_events/self.eventsPerJob)
596 >            elif(self.selectNumberOfJobs) :  
597 >                self.total_number_of_jobs =self.theNumberOfJobs
598 >                self.total_number_of_events =int(self.theNumberOfJobs*self.eventsPerJob)
599 >
600 >        elif (self.selectNumberOfJobs) :
601 >            self.total_number_of_jobs = self.theNumberOfJobs
602 >            self.eventsPerJob = int(self.total_number_of_events/self.total_number_of_jobs)
603 >
604 >        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
605 >
606 >        # is there any remainder?
607 >        check = int(self.total_number_of_events) - (int(self.total_number_of_jobs)*self.eventsPerJob)
608 >
609 >        common.logger.debug(5,'Check  '+str(check))
610 >
611 >        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created, each for '+str(self.eventsPerJob)+' events, for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
612 >        if check > 0:
613 >            common.logger.message('Warning: asked for '+str(self.total_number_of_events)+' events, but only '+str(int(self.total_number_of_jobs)*self.eventsPerJob)+' can be processed')
614 >
615 >        # argument is seed number.$i
616 >        self.list_of_args = []
617 >        for i in range(self.total_number_of_jobs):
618 >            ## Since there is no input, any site is good
619 >           # self.jobDestination.append(["Any"])
620 >            self.jobDestination.append([""]) # must be empty to write the xml correctly
621 >            args=[]
622 >            if (self.firstRun):
623 >                    ## pythia first run
624 >                #self.list_of_args.append([(str(self.firstRun)+str(i))])
625 >                args.append(str(self.firstRun)+str(i))
626 >            else:
627 >                ## no first run
628 >                #self.list_of_args.append([str(i)])
629 >                args.append(str(i))
630 >            if (self.sourceSeed):
631 >                args.append(str(self.sourceSeed)+str(i))
632 >                if (self.sourceSeedVtx):
633 >                    ## + vtx random seed
634 >                    args.append(str(self.sourceSeedVtx)+str(i))
635 >                if (self.sourceSeedG4):
636 >                    ## + G4 random seed
637 >                    args.append(str(self.sourceSeedG4)+str(i))
638 >                if (self.sourceSeedMix):    
639 >                    ## + Mix random seed
640 >                    args.append(str(self.sourceSeedMix)+str(i))
641                  pass
642 <            if good: sites.append(site)
643 <        if len(sites) == 0:
644 <            common.logger.debug(3,"No sites found after BlackList")
645 <        return sites
642 >            pass
643 >            self.list_of_args.append(args)
644 >        pass
645 >            
646 >        # print self.list_of_args
647  
648 <    def checkWhiteList(self, allsites):
648 >        return
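
The bookkeeping above reduces to simple integer arithmetic; a short worked example of the remainder check:

    # Worked example of the splitting arithmetic in jobSplittingNoInput above.
    total_number_of_events = 1000
    eventsPerJob = 300
    total_number_of_jobs = int(total_number_of_events / eventsPerJob)     # 3 jobs
    check = total_number_of_events - total_number_of_jobs * eventsPerJob  # 100 left over
    # -> 3 jobs can be created, each for 300 events, for a total of 900 events,
    #    plus a warning because only 900 of the 1000 requested events can be processed.
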
649  
650 <        if len(self.reCEWhiteList)==0: return pubDBUrls
651 <        sites = []
652 <        for site in allsites:
653 <            #print 'connecting to the URL ',url
654 <            good=0
655 <            for re in self.reCEWhiteList:
656 <                if re.search(site):
657 <                    common.logger.debug(5,'CE in white list, adding site '+site)
658 <                    good=1
659 <                if not good: continue
660 <                sites.append(site)
661 <        if len(sites) == 0:
662 <            common.logger.message("No sites found after WhiteList\n")
663 <        else:
664 <            common.logger.debug(5,"Selected sites via WhiteList are "+str(sites)+"\n")
665 <        return sites
650 >
651 >    def jobSplittingForScript(self):#CarlosDaniele
652 >        """
653 >        Perform job splitting based on the number of jobs
654 >        """
655 >        common.logger.debug(5,'Splitting per job')
656 >        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
657 >
658 >        self.total_number_of_jobs = self.theNumberOfJobs
659 >
660 >        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
661 >
662 >        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created')
663 >
664 >        # argument is the job number $i
665 >        self.list_of_args = []
666 >        for i in range(self.total_number_of_jobs):
667 >            ## Since there is no input, any site is good
668 >           # self.jobDestination.append(["Any"])
669 >            self.jobDestination.append([""])
670 >            ## no random seed
671 >            self.list_of_args.append([str(i)])
672 >        return
673 >
674 >    def split(self, jobParams):
675 >
676 >        common.jobDB.load()
677 >        #### Fabio
678 >        njobs = self.total_number_of_jobs
679 >        arglist = self.list_of_args
680 >        # create the empty structure
681 >        for i in range(njobs):
682 >            jobParams.append("")
683 >        
684 >        for job in range(njobs):
685 >            jobParams[job] = arglist[job]
686 >            # print str(arglist[job])
687 >            # print jobParams[job]
688 >            common.jobDB.setArguments(job, jobParams[job])
689 >            common.logger.debug(5,"Job "+str(job)+" Destination: "+str(self.jobDestination[job]))
690 >            common.jobDB.setDestination(job, self.jobDestination[job])
691 >
692 >        common.jobDB.save()
693 >        return
694 >    
695 >    def getJobTypeArguments(self, nj, sched):
696 >        result = ''
697 >        for i in common.jobDB.arguments(nj):
698 >            result=result+str(i)+" "
699 >        return result
700 >  
701 >    def numberOfJobs(self):
702 >        # Fabio
703 >        return self.total_number_of_jobs
704  
705      def getTarBall(self, exe):
706          """
# Line 277 | Line 708 | class Cmssw(JobType):
708          """
709          
710          # if it exist, just return it
711 <        self.tgzNameWithPath = common.work_space.shareDir()+self.tgz_name
711 >        #
712 >        # Marco. Let's start to use relative path for Boss XML files
713 >        #
714 >        self.tgzNameWithPath = common.work_space.pathForTgz()+'share/'+self.tgz_name
715          if os.path.exists(self.tgzNameWithPath):
716              return self.tgzNameWithPath
717  
# Line 291 | Line 725 | class Cmssw(JobType):
725          # First of all declare the user Scram area
726          swArea = self.scram.getSWArea_()
727          #print "swArea = ", swArea
728 <        swVersion = self.scram.getSWVersion()
729 <        #print "swVersion = ", swVersion
728 >        # swVersion = self.scram.getSWVersion()
729 >        # print "swVersion = ", swVersion
730          swReleaseTop = self.scram.getReleaseTop_()
731          #print "swReleaseTop = ", swReleaseTop
732          
# Line 300 | Line 734 | class Cmssw(JobType):
734          if swReleaseTop == '' or swArea == swReleaseTop:
735              return
736  
737 <        filesToBeTarred = []
738 <        ## First find the executable
739 <        if (self.executable != ''):
740 <            exeWithPath = self.scram.findFile_(executable)
741 < #           print exeWithPath
742 <            if ( not exeWithPath ):
743 <                raise CrabException('User executable '+executable+' not found')
744 <
745 <            ## then check if it's private or not
746 <            if exeWithPath.find(swReleaseTop) == -1:
747 <                # the exe is private, so we must ship
748 <                common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
749 <                path = swArea+'/'
750 <                exe = string.replace(exeWithPath, path,'')
751 <                filesToBeTarred.append(exe)
752 <                pass
753 <            else:
754 <                # the exe is from release, we'll find it on WN
755 <                pass
756 <
757 <        ## Now get the libraries: only those in local working area
758 <        libDir = 'lib'
759 <        lib = swArea+'/' +libDir
760 <        common.logger.debug(5,"lib "+lib+" to be tarred")
761 <        if os.path.exists(lib):
762 <            filesToBeTarred.append(libDir)
763 <
764 <        ## Now check if the Data dir is present
765 <        dataDir = 'src/Data/'
766 <        if os.path.isdir(swArea+'/'+dataDir):
767 <            filesToBeTarred.append(dataDir)
768 <
769 <        ## Create the tar-ball
770 <        if len(filesToBeTarred)>0:
771 <            cwd = os.getcwd()
772 <            os.chdir(swArea)
773 <            tarcmd = 'tar zcvf ' + self.tgzNameWithPath + ' '
774 <            for line in filesToBeTarred:
775 <                tarcmd = tarcmd + line + ' '
776 <            cout = runCommand(tarcmd)
777 <            if not cout:
778 <                raise CrabException('Could not create tar-ball')
779 <            os.chdir(cwd)
780 <        else:
781 <            common.logger.debug(5,"No files to be to be tarred")
737 >        import tarfile
738 >        try: # create tar ball
739 >            tar = tarfile.open(self.tgzNameWithPath, "w:gz")
740 >            ## First find the executable
741 >            if (self.executable != ''):
742 >                exeWithPath = self.scram.findFile_(executable)
743 >                if ( not exeWithPath ):
744 >                    raise CrabException('User executable '+executable+' not found')
745 >    
746 >                ## then check if it's private or not
747 >                if exeWithPath.find(swReleaseTop) == -1:
748 >                    # the exe is private, so we must ship
749 >                    common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
750 >                    path = swArea+'/'
751 >                    # distinguish case when script is in user project area or given by full path somewhere else
752 >                    if exeWithPath.find(path) >= 0 :
753 >                        exe = string.replace(exeWithPath, path,'')
754 >                        tar.add(path+exe,os.path.basename(executable))
755 >                    else :
756 >                        tar.add(exeWithPath,os.path.basename(executable))
757 >                    pass
758 >                else:
759 >                    # the exe is from release, we'll find it on WN
760 >                    pass
761 >    
762 >            ## Now get the libraries: only those in local working area
763 >            libDir = 'lib'
764 >            lib = swArea+'/' +libDir
765 >            common.logger.debug(5,"lib "+lib+" to be tarred")
766 >            if os.path.exists(lib):
767 >                tar.add(lib,libDir)
768 >    
769 >            ## Now check if module dir is present
770 >            moduleDir = 'module'
771 >            module = swArea + '/' + moduleDir
772 >            if os.path.isdir(module):
773 >                tar.add(module,moduleDir)
774 >
775 >            ## Now check if any data dir(s) is present
776 >            swAreaLen=len(swArea)
777 >            for root, dirs, files in os.walk(swArea):
778 >                if "data" in dirs:
779 >                    common.logger.debug(5,"data "+root+"/data"+" to be tarred")
780 >                    tar.add(root+"/data",root[swAreaLen:]+"/data")
781 >
782 >            ## Add ProdAgent dir to tar
783 >            paDir = 'ProdAgentApi'
784 >            pa = os.environ['CRABDIR'] + '/' + 'ProdAgentApi'
785 >            if os.path.isdir(pa):
786 >                tar.add(pa,paDir)
787 >
788 >            ### FEDE FOR DBS PUBLICATION
789 >            ## Add PRODCOMMON dir to tar
790 >            prodcommonDir = 'ProdCommon'
791 >            prodcommonPath = os.environ['CRABDIR'] + '/' + 'ProdCommon'
792 >            if os.path.isdir(prodcommonPath):
793 >                tar.add(prodcommonPath,prodcommonDir)
794 >            #############################    
795 >        
796 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
797 >            tar.close()
798 >        except :
799 >            raise CrabException('Could not create tar-ball')
800 >
801 >        ## check for tarball size
802 >        tarballinfo = os.stat(self.tgzNameWithPath)
803 >        if ( tarballinfo.st_size > self.MaxTarBallSize*1024*1024 ) :
804 >            raise CrabException('Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) + ' MB input sandbox limit and is not supported by the GRID submission system in use. Please make sure that no unnecessary files are left in the data directories of your local CMSSW project area, as they are automatically packed into the input sandbox.')
805 >
806 >        ## create tar-ball with ML stuff
807 >        self.MLtgzfile =  common.work_space.pathForTgz()+'share/MLfiles.tgz'
808 >        try:
809 >            tar = tarfile.open(self.MLtgzfile, "w:gz")
810 >            path=os.environ['CRABDIR'] + '/python/'
811 >            for file in ['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py', 'parseCrabFjr.py']:
812 >                tar.add(path+file,file)
813 >            common.logger.debug(5,"Files added to "+self.MLtgzfile+" : "+str(tar.getnames()))
814 >            tar.close()
815 >        except :
816 >            raise CrabException('Could not create ML files tar-ball')
817          
818          return
819          
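
For readers unfamiliar with the tarfile-based packing in getTarBall above, here is a minimal standalone sketch that gzips the lib directory and any data/ subdirectories of a project area and then applies the size limit (the helper name and paths are placeholders; 9.5 MB mirrors the EDG.maxtarballsize default read above):

    import os, tarfile

    def make_sandbox_tarball(sw_area, tgz_path, max_mb=9.5):
        # Sketch of the packing and size check done in getTarBall (hypothetical helper).
        tar = tarfile.open(tgz_path, "w:gz")
        lib = os.path.join(sw_area, 'lib')
        if os.path.exists(lib):
            tar.add(lib, 'lib')                      # local libraries
        for root, dirs, files in os.walk(sw_area):   # any data/ directory in the area
            if 'data' in dirs:
                tar.add(os.path.join(root, 'data'), root[len(sw_area):] + '/data')
        tar.close()
        if os.stat(tgz_path).st_size > max_mb * 1024 * 1024:
            raise RuntimeError('Input sandbox larger than %s MB' % max_mb)
        return tgz_path
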
# Line 354 | Line 823 | class Cmssw(JobType):
823          the execution environment for the job 'nj'.
824          """
825          # Prepare JobType-independent part
826 <        txt = self.wsSetupCMSEnvironment_()
826 >        txt = ''
827 >  
828 >        ## OLI_Daniele at this level  middleware already known
829 >
830 >        txt += 'if [ $middleware == LCG ]; then \n'
831 >        txt += self.wsSetupCMSLCGEnvironment_()
832 >        txt += 'elif [ $middleware == OSG ]; then\n'
833 >        txt += '    WORKING_DIR=`/bin/mktemp  -d $OSG_WN_TMP/cms_XXXXXXXXXXXX`\n'
834 >        txt += '    echo "Created working directory: $WORKING_DIR"\n'
835 >        txt += '    if [ ! -d $WORKING_DIR ] ;then\n'
836 >        txt += '        echo "SET_CMS_ENV 10016 ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
837 >        txt += '    echo "JOB_EXIT_STATUS = 10016"\n'
838 >        txt += '    echo "JobExitCode=10016" | tee -a $RUNTIME_AREA/$repo\n'
839 >        txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
840 >        txt += '        rm -f $RUNTIME_AREA/$repo \n'
841 >        txt += '        echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
842 >        txt += '        echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
843 >        txt += '        exit 1\n'
844 >        txt += '    fi\n'
845 >        txt += '\n'
846 >        txt += '    echo "Change to working directory: $WORKING_DIR"\n'
847 >        txt += '    cd $WORKING_DIR\n'
848 >        txt += self.wsSetupCMSOSGEnvironment_()
849 >        txt += 'fi\n'
850  
851          # Prepare JobType-specific part
852          scram = self.scram.commandName()
853          txt += '\n\n'
854          txt += 'echo "### SPECIFIC JOB SETUP ENVIRONMENT ###"\n'
855 +        txt += 'echo "Setting SCRAM_ARCH='+self.executable_arch+'"\n'
856 +        txt += 'export SCRAM_ARCH='+self.executable_arch+'\n'
857          txt += scram+' project CMSSW '+self.version+'\n'
858          txt += 'status=$?\n'
859          txt += 'if [ $status != 0 ] ; then\n'
860 <        txt += '   echo "SET_EXE_ENV 1 ==>ERROR CMSSW '+self.version+' not found on `hostname`" \n'
861 <        txt += '   echo "JOB_EXIT_STATUS = 5"\n'
862 <        txt += '   echo "SanityCheckCode = 5" | tee -a $RUNTIME_AREA/$repo\n'
860 >        txt += '   echo "SET_EXE_ENV 10034 ==>ERROR CMSSW '+self.version+' not found on `hostname`" \n'
861 >        txt += '   echo "JOB_EXIT_STATUS = 10034"\n'
862 >        txt += '   echo "JobExitCode=10034" | tee -a $RUNTIME_AREA/$repo\n'
863          txt += '   dumpStatus $RUNTIME_AREA/$repo\n'
864 <        txt += '   exit 5 \n'
864 >        txt += '   rm -f $RUNTIME_AREA/$repo \n'
865 >        txt += '   echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
866 >        txt += '   echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
867 >        ## OLI_Daniele
868 >        txt += '    if [ $middleware == OSG ]; then \n'
869 >        txt += '        echo "Remove working directory: $WORKING_DIR"\n'
870 >        txt += '        cd $RUNTIME_AREA\n'
871 >        txt += '        /bin/rm -rf $WORKING_DIR\n'
872 >        txt += '        if [ -d $WORKING_DIR ] ;then\n'
873 >        txt += '        echo "SET_CMS_ENV 10018 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after CMSSW CMSSW_0_6_1 not found on `hostname`"\n'
874 >        txt += '        echo "JOB_EXIT_STATUS = 10018"\n'
875 >        txt += '        echo "JobExitCode=10018" | tee -a $RUNTIME_AREA/$repo\n'
876 >        txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
877 >        txt += '            rm -f $RUNTIME_AREA/$repo \n'
878 >        txt += '            echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
879 >        txt += '            echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
880 >        txt += '        fi\n'
881 >        txt += '    fi \n'
882 >        txt += '   exit 1 \n'
883          txt += 'fi \n'
884          txt += 'echo "CMSSW_VERSION =  '+self.version+'"\n'
885          txt += 'cd '+self.version+'\n'
886          ### needed grep for bug in scramv1 ###
887 +        txt += scram+' runtime -sh\n'
888          txt += 'eval `'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME`\n'
889 +        txt += 'echo $PATH\n'
890  
891          # Handle the arguments:
892          txt += "\n"
893 <        txt += "## ARGUMNETS: $1 Job Number\n"
380 <        # txt += "## ARGUMNETS: $2 First Event for this job\n"
381 <        # txt += "## ARGUMNETS: $3 Max Event for this job\n"
893 >        txt += "## number of arguments (first argument always jobnumber)\n"
894          txt += "\n"
895 <        txt += "narg=$#\n"
896 <        txt += "if [ $narg -lt 1 ]\n"
895 > #        txt += "narg=$#\n"
896 >        txt += "if [ $nargs -lt 2 ]\n"
897          txt += "then\n"
898 <        txt += "    echo 'SET_EXE_ENV 1 ==> ERROR Too few arguments' +$narg+ \n"
899 <        txt += '    echo "JOB_EXIT_STATUS = 1"\n'
900 <        txt += '    echo "SanityCheckCode = 1" | tee -a $RUNTIME_AREA/$repo\n'
898 >        txt += "    echo 'SET_EXE_ENV 1 ==> ERROR Too few arguments' +$nargs+ \n"
899 >        txt += '    echo "JOB_EXIT_STATUS = 50113"\n'
900 >        txt += '    echo "JobExitCode=50113" | tee -a $RUNTIME_AREA/$repo\n'
901          txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
902 +        txt += '    rm -f $RUNTIME_AREA/$repo \n'
903 +        txt += '    echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
904 +        txt += '    echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
905 +        ## OLI_Daniele
906 +        txt += '    if [ $middleware == OSG ]; then \n'
907 +        txt += '        echo "Remove working directory: $WORKING_DIR"\n'
908 +        txt += '        cd $RUNTIME_AREA\n'
909 +        txt += '        /bin/rm -rf $WORKING_DIR\n'
910 +        txt += '        if [ -d $WORKING_DIR ] ;then\n'
911 +        txt += '        echo "SET_EXE_ENV 50114 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Too few arguments for CRAB job wrapper"\n'
912 +        txt += '        echo "JOB_EXIT_STATUS = 50114"\n'
913 +        txt += '        echo "JobExitCode=50114" | tee -a $RUNTIME_AREA/$repo\n'
914 +        txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
915 +        txt += '            rm -f $RUNTIME_AREA/$repo \n'
916 +        txt += '            echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
917 +        txt += '            echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
918 +        txt += '        fi\n'
919 +        txt += '    fi \n'
920          txt += "    exit 1\n"
921          txt += "fi\n"
922          txt += "\n"
393        txt += "NJob=$1\n"
394        # txt += "FirstEvent=$2\n"
395        # txt += "MaxEvents=$3\n"
923  
924          # Prepare job-specific part
925          job = common.job_list[nj]
926 <        pset = os.path.basename(job.configFilename())
927 <        txt += '\n'
928 <        txt += 'cp $RUNTIME_AREA/'+pset+' pset.cfg\n'
929 <        # txt += 'if [ -e $RUNTIME_AREA/orcarc_$CE ] ; then\n'
930 <        # txt += '  cat $RUNTIME_AREA/orcarc_$CE .orcarc >> .orcarc_tmp\n'
931 <        # txt += '  mv .orcarc_tmp .orcarc\n'
932 <        # txt += 'fi\n'
933 <        # txt += 'if [ -e $RUNTIME_AREA/init_$CE.sh ] ; then\n'
934 <        # txt += '  cp $RUNTIME_AREA/init_$CE.sh init.sh\n'
935 <        # txt += 'fi\n'
926 >        ### FEDE FOR DBS OUTPUT PUBLICATION
927 >        if (self.datasetPath):
928 >            txt += '\n'
929 >            txt += 'DatasetPath='+self.datasetPath+'\n'
930 >
931 >            datasetpath_split = self.datasetPath.split("/")
932 >            
933 >            txt += 'PrimaryDataset='+datasetpath_split[1]+'\n'
934 >            txt += 'DataTier='+datasetpath_split[2]+'\n'
935 >            txt += 'ProcessedDataset='+datasetpath_split[3]+'\n'
936 >            txt += 'ApplicationFamily=Online\n'
937 >
938 >        else:
939 >            txt += 'DatasetPath=MCDataTier\n'
940 >            txt += 'PrimaryDataset=null\n'
941 >            txt += 'DataTier=null\n'
942 >            txt += 'ProcessedDataset=null\n'
943 >            txt += 'ApplicationFamily=MCDataTier\n'
944 >        ##################    
945 >        if self.pset != None: #CarlosDaniele
946 >            pset = os.path.basename(job.configFilename())
947 >            txt += '\n'
948 >            if (self.datasetPath): # standard job
949 >                #txt += 'InputFiles=$2\n'
950 >                txt += 'InputFiles=${args[1]}\n'
951 >                txt += 'MaxEvents=${args[2]}\n'
952 >                txt += 'SkipEvents=${args[3]}\n'
953 >                txt += 'echo "Inputfiles:<$InputFiles>"\n'
954 >                txt += 'sed "s#{\'INPUT\'}#$InputFiles#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
955 >                txt += 'echo "MaxEvents:<$MaxEvents>"\n'
956 >                txt += 'sed "s#INPUTMAXEVENTS#$MaxEvents#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
957 >                txt += 'echo "SkipEvents:<$SkipEvents>"\n'
958 >                txt += 'sed "s#INPUTSKIPEVENTS#$SkipEvents#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
959 >            else:  # pythia like job
960 >                seedIndex=1
961 >                if (self.firstRun):
962 >                    txt += 'FirstRun=${args['+str(seedIndex)+']}\n'
963 >                    txt += 'echo "FirstRun: <$FirstRun>"\n'
964 >                    txt += 'sed "s#\<INPUTFIRSTRUN\>#$FirstRun#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
965 >                    seedIndex=seedIndex+1
966 >
967 >                if (self.sourceSeed):
968 >                    txt += 'Seed=${args['+str(seedIndex)+']}\n'
969 >                    txt += 'sed "s#\<INPUT\>#$Seed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
970 >                    seedIndex=seedIndex+1
971 >                    ## the following seeds are not always present
972 >                    if (self.sourceSeedVtx):
973 >                        txt += 'VtxSeed=${args['+str(seedIndex)+']}\n'
974 >                        txt += 'echo "VtxSeed: <$VtxSeed>"\n'
975 >                        txt += 'sed "s#\<INPUTVTX\>#$VtxSeed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
976 >                        seedIndex += 1
977 >                    if (self.sourceSeedG4):
978 >                        txt += 'G4Seed=${args['+str(seedIndex)+']}\n'
979 >                        txt += 'echo "G4Seed: <$G4Seed>"\n'
980 >                        txt += 'sed "s#\<INPUTG4\>#$G4Seed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
981 >                        seedIndex += 1
982 >                    if (self.sourceSeedMix):
983 >                        txt += 'mixSeed=${args['+str(seedIndex)+']}\n'
984 >                        txt += 'echo "MixSeed: <$mixSeed>"\n'
985 >                        txt += 'sed "s#\<INPUTMIX\>#$mixSeed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
986 >                        seedIndex += 1
987 >                    pass
988 >                pass
989 >            txt += 'mv -f '+pset+' pset.cfg\n'
990  
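(Editor's note, not part of the file: the sed commands emitted above rewrite placeholder tokens in the job's pset.cfg with per-job values taken from the wrapper arguments. A rough Python equivalent of the seed substitutions, assuming a pset that contains the bare INPUT/INPUTFIRSTRUN/INPUTVTX/INPUTG4/INPUTMIX tokens used above, is sketched below; token names and values are illustrative only.)

    # Sketch only; mirrors the generated `sed "s#\<TOKEN\>#value#"` calls (word-bounded match).
    import re

    def substitute_seeds(cfg_text, seeds):
        # seeds: mapping of placeholder token -> per-job value, e.g. {'INPUT': 12345, 'INPUTVTX': 678}
        for token, value in seeds.items():
            cfg_text = re.sub(r'\b%s\b' % token, str(value), cfg_text)
        return cfg_text

    # usage sketch: substitute_seeds(open('pset.cfg').read(), {'INPUTFIRSTRUN': 1, 'INPUT': 12345})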
991          if len(self.additional_inbox_files) > 0:
992              for file in self.additional_inbox_files:
993 <                txt += 'if [ -e $RUNTIME_AREA/'+file+' ] ; then\n'
994 <                txt += '   cp $RUNTIME_AREA/'+file+' .\n'
995 <                txt += '   chmod +x '+file+'\n'
993 >                relFile = file.split("/")[-1]
994 >                txt += 'if [ -e $RUNTIME_AREA/'+relFile+' ] ; then\n'
995 >                txt += '   cp $RUNTIME_AREA/'+relFile+' .\n'
996 >                txt += '   chmod +x '+relFile+'\n'
997                  txt += 'fi\n'
998              pass
999  
1000 <        # txt += '\n'
1001 <        # txt += 'chmod +x ./init.sh\n'
1002 <        # txt += './init.sh\n'
1003 <        # txt += 'exitStatus=$?\n'
1004 <        # txt += 'if [ $exitStatus != 0 ] ; then\n'
1005 <        # txt += '  echo "SET_EXE_ENV 1 ==> ERROR StageIn init script failed"\n'
1006 <        # txt += '  echo "JOB_EXIT_STATUS = $exitStatus" \n'
1007 <        # txt += '  echo "SanityCheckCode = $exitStatus" | tee -a $RUNTIME_AREA/$repo\n'
1008 <        # txt += '  dumpStatus $RUNTIME_AREA/$repo\n'
1009 <        # txt += '  exit $exitStatus\n'
1010 <        # txt += 'fi\n'
1011 <        # txt += "echo 'SET_EXE_ENV 0 ==> job setup ok'\n"
1012 <        txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
1013 <
1014 <        # txt += 'echo "FirstEvent=$FirstEvent" >> .orcarc\n'
1015 <        # txt += 'echo "MaxEvents=$MaxEvents" >> .orcarc\n'
434 <        # if self.ML:
435 <        #     txt += 'echo "MonalisaJobId=$NJob" >> .orcarc\n'
1000 >        if self.pset != None: #CarlosDaniele
1001 >            txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
1002 >        
1003 >            txt += '\n'
1004 >            txt += 'echo "***** cat pset.cfg *********"\n'
1005 >            txt += 'cat pset.cfg\n'
1006 >            txt += 'echo "****** end pset.cfg ********"\n'
1007 >            txt += '\n'
1008 >            ### FEDE FOR DBS OUTPUT PUBLICATION
1009 >            txt += 'PSETHASH=`EdmConfigHash < pset.cfg`'
1010 >            ##############
1011 >            txt += '\n'
1012 >            # txt += 'echo "***** cat pset1.cfg *********"\n'
1013 >            # txt += 'cat pset1.cfg\n'
1014 >            # txt += 'echo "****** end pset1.cfg ********"\n'
1015 >        return txt
1016  
1017 <        txt += '\n'
1018 <        txt += 'echo "***** cat pset.cfg *********"\n'
1019 <        txt += 'cat pset.cfg\n'
1020 <        txt += 'echo "****** end pset.cfg ********"\n'
1017 >    def wsBuildExe(self, nj=0):
1018 >        """
1019 >        Put in the script the commands to build an executable
1020 >        or a library.
1021 >        """
1022 >
1023 >        txt = ""
1024 >
1025 >        if os.path.isfile(self.tgzNameWithPath):
1026 >            txt += 'echo "tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'"\n'
1027 >            txt += 'tar xzvf $RUNTIME_AREA/'+os.path.basename(self.tgzNameWithPath)+'\n'
1028 >            txt += 'untar_status=$? \n'
1029 >            txt += 'if [ $untar_status -ne 0 ]; then \n'
1030 >            txt += '   echo "SET_EXE 1 ==> ERROR Untarring .tgz file failed"\n'
1031 >            txt += '   echo "JOB_EXIT_STATUS = $untar_status" \n'
1032 >            txt += '   echo "JobExitCode=$untar_status" | tee -a $RUNTIME_AREA/$repo\n'
1033 >            txt += '   if [ $middleware == OSG ]; then \n'
1034 >            txt += '       echo "Remove working directory: $WORKING_DIR"\n'
1035 >            txt += '       cd $RUNTIME_AREA\n'
1036 >            txt += '       /bin/rm -rf $WORKING_DIR\n'
1037 >            txt += '       if [ -d $WORKING_DIR ] ;then\n'
1038 >            txt += '           echo "SET_EXE 50999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Untarring .tgz file failed"\n'
1039 >            txt += '           echo "JOB_EXIT_STATUS = 50999"\n'
1040 >            txt += '           echo "JobExitCode=50999" | tee -a $RUNTIME_AREA/$repo\n'
1041 >            txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
1042 >            txt += '           rm -f $RUNTIME_AREA/$repo \n'
1043 >            txt += '           echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1044 >            txt += '           echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1045 >            txt += '       fi\n'
1046 >            txt += '   fi \n'
1047 >            txt += '   \n'
1048 >            txt += '   exit 1 \n'
1049 >            txt += 'else \n'
1050 >            txt += '   echo "Successful untar" \n'
1051 >            txt += 'fi \n'
1052 >            txt += '\n'
1053 >            txt += 'echo "Include ProdAgentApi and PRODCOMMON in PYTHONPATH"\n'
1054 >            txt += 'if [ -z "$PYTHONPATH" ]; then\n'
1055 >            #### FEDE FOR DBS OUTPUT PUBLICATION
1056 >            txt += '   export PYTHONPATH=`pwd`/ProdAgentApi:`pwd`/ProdCommon\n'
1057 >            #txt += '   export PYTHONPATH=ProdAgentApi\n'
1058 >            txt += 'else\n'
1059 >            txt += '   export PYTHONPATH=`pwd`/ProdAgentApi:`pwd`/ProdCommon:${PYTHONPATH}\n'
1060 >            #txt += '   export PYTHONPATH=ProdAgentApi:${PYTHONPATH}\n'
1061 >            txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
1062 >            ###################  
1063 >            txt += 'fi\n'
1064 >            txt += '\n'
1065 >
1066 >            pass
1067 >        
1068          return txt
1069  
1070      def modifySteeringCards(self, nj):
# Line 447 | Line 1074 | class Cmssw(JobType):
1074          """
1075          
1076      def executableName(self):
1077 <        return self.executable
1077 >        if self.scriptExe: #CarlosDaniele
1078 >            return "sh "
1079 >        else:
1080 >            return self.executable
1081  
1082      def executableArgs(self):
1083 <        return "-p pset.cfg"
1083 >        if self.scriptExe:#CarlosDaniele
1084 >            return   self.scriptExe + " $NJob"
1085 >        else:
1086 >            return " -p pset.cfg"
1087  
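(Editor's sketch, not in the source: the job wrapper roughly concatenates these two methods to build the command line, which is why the scriptExe case returns "sh " plus the script name and job number, while the standard case returns the user executable plus " -p pset.cfg". 'cmsRun' and 'myScript.sh' below are just example values of self.executable and self.scriptExe from the user's crab.cfg.)

    # Sketch of how executableName()/executableArgs() combine for the job command line.
    def command_line(jobtype):
        return jobtype.executableName() + jobtype.executableArgs()

    # scriptExe unset : 'cmsRun' + ' -p pset.cfg'        -> "cmsRun -p pset.cfg"
    # scriptExe set   : 'sh '    + 'myScript.sh $NJob'   -> "sh myScript.sh $NJob"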
1088      def inputSandbox(self, nj):
1089          """
1090          Returns a list of filenames to be put in JDL input sandbox.
1091          """
1092          inp_box = []
1093 <        # dict added to delete duplicate from input sandbox file list
1094 <        seen = {}
1093 >        # # dict added to delete duplicate from input sandbox file list
1094 >        # seen = {}
1095          ## code
1096          if os.path.isfile(self.tgzNameWithPath):
1097              inp_box.append(self.tgzNameWithPath)
1098 +        if os.path.isfile(self.MLtgzfile):
1099 +            inp_box.append(self.MLtgzfile)
1100          ## config
1101 <        inp_box.append(common.job_list[nj].configFilename())
1101 >        if not self.pset is None:
1102 >            inp_box.append(common.work_space.pathForTgz() + 'job/' + self.configFilename())
1103          ## additional input files
1104          for file in self.additional_inbox_files:
1105 <            inp_box.append(common.work_space.cwdDir()+file)
470 <        #print "sono inputSandbox, inp_box = ", inp_box
1105 >            inp_box.append(file)
1106          return inp_box
1107  
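(Editor's sketch, not in the source: a typical inp_box assembled here holds the code tarball, the ML tarball when present, the job's .cfg unless running in the no-pset/scriptExe mode, and any additional input files from crab.cfg. The paths below are made up purely for illustration; real values come from common.work_space and the user configuration.)

    # Illustrative contents of the returned list (hypothetical paths):
    inp_box = [
        '/path/to/task/share/default.tgz',      # self.tgzNameWithPath
        '/path/to/task/share/MLfiles.tgz',      # self.MLtgzfile, if it exists
        '/path/to/task/job/CMSSW.cfg',          # pathForTgz() + 'job/' + configFilename()
        '/path/to/user/extra_input.txt',        # an additional_inbox_files entry
    ]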
1108      def outputSandbox(self, nj):
# Line 476 | Line 1111 | class Cmssw(JobType):
1111          """
1112          out_box = []
1113  
479        stdout=common.job_list[nj].stdout()
480        stderr=common.job_list[nj].stderr()
481
1114          ## User Declared output files
1115 <        for out in self.output_file:
1115 >        for out in (self.output_file+self.output_file_sandbox):
1116              n_out = nj + 1
1117              out_box.append(self.numberFile_(out,str(n_out)))
1118          return out_box
487        return []
1119  
1120      def prepareSteeringCards(self):
1121          """
1122          Make initial modifications of the user's steering card file.
1123          """
493        infile = open(self.pset,'r')
494            
495        outfile = open(common.work_space.jobDir()+self.name()+'.cfg', 'w')
496          
497        outfile.write('\n\n##### The following cards have been created by CRAB: DO NOT TOUCH #####\n')
498
499        outfile.write('InputCollections=/System/'+self.owner+'/'+self.dataset+'/'+self.dataset+'\n')
500
501        infile.close()
502        outfile.close()
1124          return
1125  
1126      def wsRenameOutput(self, nj):
# Line 508 | Line 1129 | class Cmssw(JobType):
1129          """
1130  
1131          txt = '\n'
1132 <        file_list = ''
1133 <        for fileWithSuffix in self.output_file:
1132 >        txt += '# directory content\n'
1133 >        txt += 'ls \n'
1134 >
1135 >        for fileWithSuffix in (self.output_file+self.output_file_sandbox):
1136              output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
514            file_list=file_list+output_file_num+','
515            txt += '\n'
516            txt += 'ls \n'
1137              txt += '\n'
1138 +            txt += '# check output file\n'
1139              txt += 'ls '+fileWithSuffix+'\n'
1140 <            txt += 'exe_result=$?\n'
1141 <            txt += 'if [ $exe_result -ne 0 ] ; then\n'
1142 <            txt += '   echo "ERROR: No output file to manage"\n'
1143 <            txt += '   echo "JOB_EXIT_STATUS = $exe_result"\n'
1144 <            txt += '   echo "SanityCheckCode = $exe_result" | tee -a $RUNTIME_AREA/$repo\n'
1145 <            txt += '   dumpStatus $RUNTIME_AREA/$repo\n'
1146 <            txt += '   exit $exe_result \n'
1140 >            txt += 'ls_result=$?\n'
1141 >            txt += 'if [ $ls_result -ne 0 ] ; then\n'
1142 >            txt += '   exit_status=60302\n'
1143 >            txt += '   echo "ERROR: Problem with output file"\n'
1144 >            if common.scheduler.boss_scheduler_name == 'condor_g':
1145 >                txt += '    if [ $middleware == OSG ]; then \n'
1146 >                txt += '        echo "prepare dummy output file"\n'
1147 >                txt += '        echo "Processing of job output failed" > $RUNTIME_AREA/'+output_file_num+'\n'
1148 >                txt += '    fi \n'
1149              txt += 'else\n'
1150 <            txt += '   cp '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1150 >            ### FEDE FOR DBS OUTPUT PUBLICATION
1151 >            txt += '   mv '+fileWithSuffix+' $RUNTIME_AREA\n'
1152 >            txt += '   cp $RUNTIME_AREA/'+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1153 >            #################################
1154              txt += 'fi\n'
529            txt += 'cd $RUNTIME_AREA\n'
530                      
531            pass
1155        
1156 +        txt += 'cd $RUNTIME_AREA\n'
1157 +        ### OLI_DANIELE
1158 +        txt += 'if [ $middleware == OSG ]; then\n'  
1159 +        txt += '    cd $RUNTIME_AREA\n'
1160 +        txt += '    echo "Remove working directory: $WORKING_DIR"\n'
1161 +        txt += '    /bin/rm -rf $WORKING_DIR\n'
1162 +        txt += '    if [ -d $WORKING_DIR ] ;then\n'
1163 +        txt += '    echo "SET_EXE 60999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after cleanup of WN"\n'
1164 +        txt += '    echo "JOB_EXIT_STATUS = 60999"\n'
1165 +        txt += '    echo "JobExitCode=60999" | tee -a $RUNTIME_AREA/$repo\n'
1166 +        txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
1167 +        txt += '        rm -f $RUNTIME_AREA/$repo \n'
1168 +        txt += '        echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1169 +        txt += '        echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1170 +        txt += '    fi\n'
1171 +        txt += 'fi\n'
1172 +        txt += '\n'
1173 +
1174 +        file_list = ''
1175 +        ## Add to filelist only files to be possibly copied to SE
1176 +        for fileWithSuffix in self.output_file:
1177 +            output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
1178 +            file_list=file_list+output_file_num+' '
1179          file_list=file_list[:-1]
1180 <        txt += 'file_list='+file_list+'\n'
1180 >        txt += 'file_list="'+file_list+'"\n'
1181 >
1182          return txt
1183  
1184      def numberFile_(self, file, txt):
# Line 542 | Line 1189 | class Cmssw(JobType):
1189          # take away last extension
1190          name = p[0]
1191          for x in p[1:-1]:
1192 <           name=name+"."+x
1192 >            name=name+"."+x
1193          # add "_txt"
1194          if len(p)>1:
1195 <          ext = p[len(p)-1]
1196 <          #result = name + '_' + str(txt) + "." + ext
550 <          result = name + '_' + txt + "." + ext
1195 >            ext = p[len(p)-1]
1196 >            result = name + '_' + txt + "." + ext
1197          else:
1198 <          #result = name + '_' + str(txt)
553 <          result = name + '_' + txt
1198 >            result = name + '_' + txt
1199          
1200          return result
1201  
1202 <    def getRequirements(self):
1202 >    def getRequirements(self, nj=[]):
1203          """
1204          return job requirements to add to jdl files
1205          """
1206          req = ''
1207 <        if common.analisys_common_info['sites']:
1208 <            if common.analisys_common_info['sw_version']:
1209 <                req='Member("VO-cms-' + \
1210 <                     common.analisys_common_info['sw_version'] + \
1211 <                     '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1212 <            if len(common.analisys_common_info['sites'])>0:
1213 <                req = req + ' && ('
1214 <                for i in range(len(common.analisys_common_info['sites'])):
1215 <                    req = req + 'other.GlueCEInfoHostName == "' \
1216 <                         + common.analisys_common_info['sites'][i] + '"'
1217 <                    if ( i < (int(len(common.analisys_common_info['sites']) - 1)) ):
573 <                        req = req + ' || '
574 <            req = req + ')'
575 <        #print "req = ", req
1207 >        if self.version:
1208 >            req='Member("VO-cms-' + \
1209 >                 self.version + \
1210 >                 '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1211 >        # if self.executable_arch:
1212 >        #     req='Member("VO-cms-' + \
1213 >        #          self.executable_arch + \
1214 >        #          '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1215 >
1216 >        req = req + ' && (other.GlueHostNetworkAdapterOutboundIP)'
1217 >
1218          return req
1219 +
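(Editor's example, not in the source: for concreteness, this is the requirement string the method above would return for a hypothetical software version; 'CMSSW_1_3_1' is a placeholder, the real value comes from Scram.getSWVersion().)

    # Sketch: the JDL requirement built above, for an assumed version string.
    version = 'CMSSW_1_3_1'   # placeholder value
    req = ('Member("VO-cms-' + version +
           '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
           ' && (other.GlueHostNetworkAdapterOutboundIP)')
    print(req)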
1220 +    def configFilename(self):
1221 +        """ return the config filename """
1222 +        return self.name()+'.cfg'
1223 +
1224 +    ### OLI_DANIELE
1225 +    def wsSetupCMSOSGEnvironment_(self):
1226 +        """
 1227 +        Returns part of a job script which prepares
1228 +        the execution environment and which is common for all CMS jobs.
1229 +        """
1230 +        txt = '\n'
1231 +        txt += '   echo "### SETUP CMS OSG  ENVIRONMENT ###"\n'
1232 +        txt += '   if [ -f $GRID3_APP_DIR/cmssoft/cmsset_default.sh ] ;then\n'
1233 +        txt += '      # Use $GRID3_APP_DIR/cmssoft/cmsset_default.sh to setup cms software\n'
1234 +        txt += '       export SCRAM_ARCH='+self.executable_arch+'\n'
1235 +        txt += '       source $GRID3_APP_DIR/cmssoft/cmsset_default.sh '+self.version+'\n'
1236 +        txt += '   elif [ -f $OSG_APP/cmssoft/cms/cmsset_default.sh ] ;then\n'
1237 +        txt += '      # Use $OSG_APP/cmssoft/cms/cmsset_default.sh to setup cms software\n'
1238 +        txt += '       export SCRAM_ARCH='+self.executable_arch+'\n'
1239 +        txt += '       source $OSG_APP/cmssoft/cms/cmsset_default.sh '+self.version+'\n'
1240 +        txt += '   else\n'
1241 +        txt += '       echo "SET_CMS_ENV 10020 ==> ERROR $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1242 +        txt += '       echo "JOB_EXIT_STATUS = 10020"\n'
1243 +        txt += '       echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1244 +        txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
1245 +        txt += '       rm -f $RUNTIME_AREA/$repo \n'
1246 +        txt += '       echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1247 +        txt += '       echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1249 +        txt += '\n'
1250 +        txt += '       echo "Remove working directory: $WORKING_DIR"\n'
1251 +        txt += '       cd $RUNTIME_AREA\n'
1252 +        txt += '       /bin/rm -rf $WORKING_DIR\n'
1253 +        txt += '       if [ -d $WORKING_DIR ] ;then\n'
1254 +        txt += '        echo "SET_CMS_ENV 10017 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1255 +        txt += '        echo "JOB_EXIT_STATUS = 10017"\n'
1256 +        txt += '        echo "JobExitCode=10017" | tee -a $RUNTIME_AREA/$repo\n'
1257 +        txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
1258 +        txt += '            rm -f $RUNTIME_AREA/$repo \n'
1259 +        txt += '            echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1260 +        txt += '            echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1261 +        txt += '       fi\n'
1262 +        txt += '\n'
1263 +        txt += '       exit 1\n'
1264 +        txt += '   fi\n'
1265 +        txt += '\n'
1266 +        txt += '   echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
1267 +        txt += '   echo " END SETUP CMS OSG  ENVIRONMENT "\n'
1268 +
1269 +        return txt
1270 +
1271 +    ### OLI_DANIELE
1272 +    def wsSetupCMSLCGEnvironment_(self):
1273 +        """
 1274 +        Returns part of a job script which prepares
1275 +        the execution environment and which is common for all CMS jobs.
1276 +        """
1277 +        txt  = '   \n'
1278 +        txt += '   echo " ### SETUP CMS LCG  ENVIRONMENT ### "\n'
1279 +        txt += '   if [ ! $VO_CMS_SW_DIR ] ;then\n'
1280 +        txt += '       echo "SET_CMS_ENV 10031 ==> ERROR CMS software dir not found on WN `hostname`"\n'
1281 +        txt += '       echo "JOB_EXIT_STATUS = 10031" \n'
1282 +        txt += '       echo "JobExitCode=10031" | tee -a $RUNTIME_AREA/$repo\n'
1283 +        txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
1284 +        txt += '       rm -f $RUNTIME_AREA/$repo \n'
1285 +        txt += '       echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1286 +        txt += '       echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1287 +        txt += '       exit 1\n'
1288 +        txt += '   else\n'
1289 +        txt += '       echo "Sourcing environment... "\n'
1290 +        txt += '       if [ ! -s $VO_CMS_SW_DIR/cmsset_default.sh ] ;then\n'
1291 +        txt += '           echo "SET_CMS_ENV 10020 ==> ERROR cmsset_default.sh file not found into dir $VO_CMS_SW_DIR"\n'
1292 +        txt += '           echo "JOB_EXIT_STATUS = 10020"\n'
1293 +        txt += '           echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1294 +        txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
1295 +        txt += '           rm -f $RUNTIME_AREA/$repo \n'
1296 +        txt += '           echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1297 +        txt += '           echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1298 +        txt += '           exit 1\n'
1299 +        txt += '       fi\n'
1300 +        txt += '       echo "sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1301 +        txt += '       source $VO_CMS_SW_DIR/cmsset_default.sh\n'
1302 +        txt += '       result=$?\n'
1303 +        txt += '       if [ $result -ne 0 ]; then\n'
1304 +        txt += '           echo "SET_CMS_ENV 10032 ==> ERROR problem sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
1305 +        txt += '           echo "JOB_EXIT_STATUS = 10032"\n'
1306 +        txt += '           echo "JobExitCode=10032" | tee -a $RUNTIME_AREA/$repo\n'
1307 +        txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
1308 +        txt += '           rm -f $RUNTIME_AREA/$repo \n'
1309 +        txt += '           echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1310 +        txt += '           echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1311 +        txt += '           exit 1\n'
1312 +        txt += '       fi\n'
1313 +        txt += '   fi\n'
1314 +        txt += '   \n'
1315 +        txt += '   echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
1316 +        txt += '   echo "### END SETUP CMS LCG ENVIRONMENT ###"\n'
1317 +        return txt
1318 +
1319 +    ### FEDE FOR DBS OUTPUT PUBLICATION
1320 +    def modifyReport(self, nj):
1321 +        """
1322 +        insert the part of the script that modifies the FrameworkJob Report
1323 +        """
1324 +        
1325 +        txt = ''
1326 +        txt += 'echo "Modify Job Report" \n'
1327 +        txt += 'chmod a+x $RUNTIME_AREA/'+self.version+'/ProdAgentApi/FwkJobRep/ModifyJobReport.py\n'
1328 +        txt += 'if [ -z "$SE" ]; then\n'
1329 +        txt += '    SE="" \n'
1330 +        txt += 'fi \n'
1331 +        txt += 'if [ -z "$SE_PATH" ]; then\n'
1332 +        txt += '    SE_PATH="" \n'
1333 +        txt += 'fi \n'
1334 +        txt += 'echo "SE = $SE"\n'
1335 +        txt += 'echo "SE_PATH = $SE_PATH"\n'
1336 +        txt += 'FOR_LFN=$DatasetPath/$MonitorID\n'
1337 +        txt += 'echo "FOR_LFN = $FOR_LFN"\n'
1338 +        txt += 'echo "CMSSW_VERSION = $CMSSW_VERSION"\n'
1339 +        txt += 'echo "$RUNTIME_AREA/'+self.version+'/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
1340 +        txt += '$RUNTIME_AREA/'+self.version+'/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
1341 +        txt += 'modifyReport_result=$?\n'
1342 +        txt += 'echo modifyReport_result = $modifyReport_result\n'
 1343 +        txt += 'if [ $modifyReport_result -ne 0 ]; then\n'
1344 +        txt += '    exit_status=1\n'
1345 +        txt += '    echo "ERROR: Problem with ModifyJobReport"\n'
1346 +        txt += 'else\n'
1347 +        txt += '    mv NewFrameworkJobReport.xml crab_fjr_$NJob.xml\n'
1348 +        txt += 'fi\n'
1349 +        return txt
1350 +    #################
1351 +
1352 +    def setParam_(self, param, value):
1353 +        self._params[param] = value
1354 +
1355 +    def getParams(self):
1356 +        return self._params
1357 +
1358 +    def setTaskid_(self):
1359 +        self._taskId = self.cfg_params['taskId']
1360 +        
1361 +    def getTaskid(self):
1362 +        return self._taskId
1363 +
1364 + #######################################################################
1365 +    def uniquelist(self, old):
1366 +        """
1367 +        remove duplicates from a list
1368 +        """
1369 +        nd={}
1370 +        for e in old:
1371 +            nd[e]=0
1372 +        return nd.keys()
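(Editor's usage note, not in the file: uniquelist keys a dict on the elements, so duplicates are dropped but the original order is not guaranteed. An equivalent one-liner sketch:)

    # Equivalent sketch of uniquelist (result order is arbitrary, as above):
    def uniquelist(old):
        return dict.fromkeys(old).keys()

    print(uniquelist(['f1.root', 'f2.root', 'f1.root']))   # ['f1.root', 'f2.root'] in some order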

Diff Legend

Removed lines
+ Added lines
< Changed lines
> Changed lines