root/cvsroot/COMP/CRAB/python/cms_cmssw.py

Comparing COMP/CRAB/python/cms_cmssw.py (file contents):
Revision 1.33 by mkirn, Fri Jul 28 18:19:34 2006 UTC vs.
Revision 1.93 by fanzago, Tue Jun 19 17:22:21 2007 UTC

# Line 2 | Line 2 | from JobType import JobType
2   from crab_logger import Logger
3   from crab_exceptions import *
4   from crab_util import *
5 import math
5   import common
7 import PsetManipulator  
8
9 import DBSInfo_EDM
10 import DataDiscovery_EDM
11 import DataLocation_EDM
6   import Scram
7  
8 < import os, string, re
8 > import os, string, re, shutil, glob
9  
10   class Cmssw(JobType):
11 <    def __init__(self, cfg_params):
11 >    def __init__(self, cfg_params, ncjobs):
12          JobType.__init__(self, 'CMSSW')
13          common.logger.debug(3,'CMSSW::__init__')
14  
21        self.analisys_common_info = {}
15          # Marco.
16          self._params = {}
17          self.cfg_params = cfg_params
18 +
19 +        try:
20 +            self.MaxTarBallSize = float(self.cfg_params['EDG.maxtarballsize'])
21 +        except KeyError:
22 +            self.MaxTarBallSize = 9.5
23 +
24 +        # number of jobs requested to be created, limit obj splitting
25 +        self.ncjobs = ncjobs
26 +
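A note on the new tarball-size limit: EDG.maxtarballsize is an optional config key, and the lookup simply falls back to 9.5 MB when it is absent. A minimal standalone sketch of that pattern (cfg_params is just a plain dict here):

    def max_tarball_size(cfg_params, default_mb=9.5):
        # an optional EDG.maxtarballsize entry overrides the default sandbox limit, in MB
        try:
            return float(cfg_params['EDG.maxtarballsize'])
        except KeyError:
            return default_mb
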
27          log = common.logger
28          
29          self.scram = Scram.Scram(cfg_params)
28        scramArea = ''
30          self.additional_inbox_files = []
31          self.scriptExe = ''
32          self.executable = ''
33 +        self.executable_arch = self.scram.getArch()
34          self.tgz_name = 'default.tgz'
35 +        self.scriptName = 'CMSSW.sh'
36 +        self.pset = ''      #scrip use case Da  
37 +        self.datasetPath = '' #scrip use case Da
38  
39 +        # set FJR file name
40 +        self.fjrFileName = 'crab_fjr.xml'
41  
42          self.version = self.scram.getSWVersion()
43 +        common.taskDB.setDict('codeVersion',self.version)
44          self.setParam_('application', self.version)
37        common.analisys_common_info['sw_version'] = self.version
38        ### FEDE
39        common.analisys_common_info['copy_input_data'] = 0
40        common.analisys_common_info['events_management'] = 1
45  
46          ### collect Data cards
47 +
48 +        ## get DBS mode
49 +        try:
50 +            self.use_dbs_1 = int(self.cfg_params['CMSSW.use_dbs_1'])
51 +        except KeyError:
52 +            self.use_dbs_1 = 0
53 +            
54          try:
55              tmp =  cfg_params['CMSSW.datasetpath']
56              log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
# Line 59 | Line 70 | class Cmssw(JobType):
70              self.setParam_('dataset', 'None')
71              self.setParam_('owner', 'None')
72          else:
73 <            datasetpath_split = self.datasetPath.split("/")
74 <            self.setParam_('dataset', datasetpath_split[1])
75 <            self.setParam_('owner', datasetpath_split[-1])
76 <
73 >            try:
74 >                datasetpath_split = self.datasetPath.split("/")
75 >                # standard style
76 >                if self.use_dbs_1 == 1 :
77 >                    self.setParam_('dataset', datasetpath_split[1])
78 >                    self.setParam_('owner', datasetpath_split[-1])
79 >                else:
80 >                    self.setParam_('dataset', datasetpath_split[1])
81 >                    self.setParam_('owner', datasetpath_split[2])
82 >            except:
83 >                self.setParam_('dataset', self.datasetPath)
84 >                self.setParam_('owner', self.datasetPath)
85 >                
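The dataset and owner parameters are now split according to the DBS flavour in use. A minimal sketch of the two layouts, assuming the conventional DBS-1 path /Primary/DataTier/Processed and DBS-2 path /Primary/Processed/DataTier (the example paths are made up):

    def dataset_and_owner(datasetpath, use_dbs_1):
        parts = datasetpath.split("/")   # parts[0] is '' because the path starts with '/'
        if use_dbs_1:
            return parts[1], parts[-1]   # DBS-1 style: owner is the last field
        return parts[1], parts[2]        # DBS-2 style: owner is the second field

    print(dataset_and_owner("/TestPrimary/RECO/TestProc", 1))  # ('TestPrimary', 'TestProc')
    print(dataset_and_owner("/TestPrimary/TestProc/RECO", 0))  # ('TestPrimary', 'TestProc')
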
86          self.setTaskid_()
87          self.setParam_('taskId', self.cfg_params['taskId'])
88  
# Line 85 | Line 105 | class Cmssw(JobType):
105          try:
106              self.pset = cfg_params['CMSSW.pset']
107              log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
108 <            if (not os.path.exists(self.pset)):
109 <                raise CrabException("User defined PSet file "+self.pset+" does not exist")
108 >            if self.pset.lower() != 'none' :
109 >                if (not os.path.exists(self.pset)):
110 >                    raise CrabException("User defined PSet file "+self.pset+" does not exist")
111 >            else:
112 >                self.pset = None
113          except KeyError:
114              raise CrabException("PSet file missing. Cannot run cmsRun ")
115  
116          # output files
117 +        ## stuff which must be returned always via sandbox
118 +        self.output_file_sandbox = []
119 +
120 +        # add fjr report by default via sandbox
121 +        self.output_file_sandbox.append(self.fjrFileName)
122 +
123 +        # other output files to be returned via sandbox or copied to SE
124          try:
125              self.output_file = []
96
126              tmp = cfg_params['CMSSW.output_file']
127              if tmp != '':
128                  tmpOutFiles = string.split(cfg_params['CMSSW.output_file'],',')
# Line 103 | Line 132 | class Cmssw(JobType):
132                      self.output_file.append(tmp)
133                      pass
134              else:
135 <                log.message("No output file defined: only stdout/err will be available")
135 >                log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
136                  pass
137              pass
138          except KeyError:
139 <            log.message("No output file defined: only stdout/err will be available")
139 >            log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
140              pass
141  
142          # script_exe file as additional file in inputSandbox
143          try:
144              self.scriptExe = cfg_params['USER.script_exe']
116            self.additional_inbox_files.append(self.scriptExe)
145              if self.scriptExe != '':
146                 if not os.path.isfile(self.scriptExe):
147 <                  msg ="WARNING. file "+self.scriptExe+" not found"
147 >                  msg ="ERROR. file "+self.scriptExe+" not found"
148                    raise CrabException(msg)
149 +               self.additional_inbox_files.append(string.strip(self.scriptExe))
150          except KeyError:
151 <           pass
152 <                  
151 >            self.scriptExe = ''
152 >
153 >        #CarlosDaniele
154 >        if self.datasetPath == None and self.pset == None and self.scriptExe == '' :
155 >           msg ="Error. script_exe  not defined"
156 >           raise CrabException(msg)
157 >
158          ## additional input files
159          try:
160              tmpAddFiles = string.split(cfg_params['USER.additional_input_files'],',')
161              for tmp in tmpAddFiles:
162 <                if not os.path.exists(tmp):
163 <                    raise CrabException("Additional input file not found: "+tmp)
164 <                self.additional_inbox_files.append(string.strip(tmp))
162 >                tmp = string.strip(tmp)
163 >                dirname = ''
164 >                if not tmp[0]=="/": dirname = "."
165 >                files = []
166 >                if string.find(tmp,"*")>-1:
167 >                    files = glob.glob(os.path.join(dirname, tmp))
168 >                    if len(files)==0:
169 >                        raise CrabException("No additional input file found with this pattern: "+tmp)
170 >                else:
171 >                    files.append(tmp)
172 >                for file in files:
173 >                    if not os.path.exists(file):
174 >                        raise CrabException("Additional input file not found: "+file)
175 >                    pass
176 >                    fname = string.split(file, '/')[-1]
177 >                    storedFile = common.work_space.pathForTgz()+'share/'+fname
178 >                    shutil.copyfile(file, storedFile)
179 >                    self.additional_inbox_files.append(string.strip(storedFile))
180                  pass
181              pass
182 +            common.logger.debug(5,"Additional input files: "+str(self.additional_inbox_files))
183          except KeyError:
184              pass
185  
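The reworked additional_input_files handling now expands wildcards with glob and stages each match into the task share directory before adding it to the input sandbox. A self-contained sketch of the same logic, with share_dir standing in for common.work_space.pathForTgz()+'share/' and plain RuntimeError in place of CrabException:

    import glob, os, shutil

    def collect_additional_files(value, share_dir):
        collected = []
        for pattern in [p.strip() for p in value.split(',') if p.strip()]:
            dirname = '' if pattern.startswith('/') else '.'   # relative patterns are searched in "."
            if '*' in pattern:
                files = glob.glob(os.path.join(dirname, pattern))
                if not files:
                    raise RuntimeError("No additional input file found with this pattern: " + pattern)
            else:
                files = [pattern]
            for f in files:
                if not os.path.exists(f):
                    raise RuntimeError("Additional input file not found: " + f)
                stored = os.path.join(share_dir, os.path.basename(f))
                shutil.copyfile(f, stored)   # the staged copy is what ends up in the sandbox
                collected.append(stored)
        return collected
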
186          # files per job
187          try:
188 <            self.filesPerJob = int(cfg_params['CMSSW.files_per_jobs']) #Daniele
189 <            self.selectFilesPerJob = 1
188 >            if (cfg_params['CMSSW.files_per_jobs']):
189 >                raise CrabException("files_per_jobs no longer supported.  Quitting.")
190          except KeyError:
191 <            self.filesPerJob = 0
142 <            self.selectFilesPerJob = 0
191 >            pass
192  
193          ## Events per job
194          try:
# Line 157 | Line 206 | class Cmssw(JobType):
206              self.theNumberOfJobs = 0
207              self.selectNumberOfJobs = 0
208  
209 +        try:
210 +            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
211 +            self.selectTotalNumberEvents = 1
212 +        except KeyError:
213 +            self.total_number_of_events = 0
214 +            self.selectTotalNumberEvents = 0
215 +
216 +        if self.pset != None: #CarlosDaniele
217 +             if ( (self.selectTotalNumberEvents + self.selectEventsPerJob + self.selectNumberOfJobs) != 2 ):
218 +                 msg = 'Must define exactly two of total_number_of_events, events_per_job, or number_of_jobs.'
219 +                 raise CrabException(msg)
220 +        else:
221 +             if (self.selectNumberOfJobs == 0):
222 +                 msg = 'Must specify  number_of_jobs.'
223 +                 raise CrabException(msg)
224 +
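The splitting parameters are now validated up front: with a parameter set, exactly two of the three event/job settings must be given; in the script-only case, number_of_jobs is mandatory. A compact restatement of that rule (the select* flags are the 0/1 values set above; have_pset mirrors self.pset != None):

    def check_splitting(select_total, select_per_job, select_n_jobs, have_pset=True):
        if have_pset:
            if select_total + select_per_job + select_n_jobs != 2:
                raise ValueError("Must define exactly two of total_number_of_events, "
                                 "events_per_job, or number_of_jobs.")
        elif not select_n_jobs:
            raise ValueError("Must specify number_of_jobs.")
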
225          ## source seed for pythia
226          try:
227              self.sourceSeed = int(cfg_params['CMSSW.pythia_seed'])
# Line 170 | Line 235 | class Cmssw(JobType):
235              self.sourceSeedVtx = None
236              common.logger.debug(5,"No vertex seed given")
237  
173        if not (self.selectFilesPerJob + self.selectEventsPerJob + self.selectNumberOfJobs == 1 ):
174            msg = 'Must define either files_per_jobs or events_per_job or number_of_jobs'
175            raise CrabException(msg)
176
238          try:
239 <            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
239 >            self.sourceSeedG4 = int(cfg_params['CMSSW.g4_seed'])
240          except KeyError:
241 <            msg = 'Must define total_number_of_events'
242 <            raise CrabException(msg)
243 <        
183 <        CEBlackList = []
241 >            self.sourceSeedG4 = None
242 >            common.logger.debug(5,"No g4 sim hits seed given")
243 >
244          try:
245 <            tmpBad = string.split(cfg_params['EDG.ce_black_list'],',')
186 <            for tmp in tmpBad:
187 <                tmp=string.strip(tmp)
188 <                CEBlackList.append(tmp)
245 >            self.sourceSeedMix = int(cfg_params['CMSSW.mix_seed'])
246          except KeyError:
247 <            pass
248 <
192 <        self.reCEBlackList=[]
193 <        for bad in CEBlackList:
194 <            self.reCEBlackList.append(re.compile( bad ))
195 <
196 <        common.logger.debug(5,'CEBlackList: '+str(CEBlackList))
247 >            self.sourceSeedMix = None
248 >            common.logger.debug(5,"No mix seed given")
249  
198        CEWhiteList = []
250          try:
251 <            tmpGood = string.split(cfg_params['EDG.ce_white_list'],',')
201 <            for tmp in tmpGood:
202 <                tmp=string.strip(tmp)
203 <                CEWhiteList.append(tmp)
251 >            self.firstRun = int(cfg_params['CMSSW.first_run'])
252          except KeyError:
253 <            pass
254 <
255 <        #print 'CEWhiteList: ',CEWhiteList
256 <        self.reCEWhiteList=[]
257 <        for Good in CEWhiteList:
210 <            self.reCEWhiteList.append(re.compile( Good ))
211 <
212 <        common.logger.debug(5,'CEWhiteList: '+str(CEWhiteList))
213 <
214 <        self.PsetEdit = PsetManipulator.PsetManipulator(self.pset) #Daniele Pset
253 >            self.firstRun = None
254 >            common.logger.debug(5,"No first run given")
255 >        if self.pset != None: #CarlosDaniele
256 >            import PsetManipulator  
257 >            PsetEdit = PsetManipulator.PsetManipulator(self.pset) #Daniele Pset
258  
259          #DBSDLS-start
260          ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
261          self.maxEvents=0  # max events available   ( --> check the requested nb. of evts in Creator.py)
262          self.DBSPaths={}  # all dbs paths requested ( --> input to the site local discovery script)
263 +        self.jobDestination=[]  # Site destination(s) for each job (list of lists)
264          ## Perform the data location and discovery (based on DBS/DLS)
265          ## SL: Don't if NONE is specified as input (pythia use case)
266 <        common.analisys_common_info['sites']=None
266 >        blockSites = {}
267          if self.datasetPath:
268 <            self.DataDiscoveryAndLocation(cfg_params)
268 >            blockSites = self.DataDiscoveryAndLocation(cfg_params)
269          #DBSDLS-end          
270  
271          self.tgzNameWithPath = self.getTarBall(self.executable)
272      
273          ## Select Splitting
274 <        if self.selectNoInput: self.jobSplittingNoInput()
275 <        elif self.selectFilesPerJob or self.selectEventsPerJob or self.selectNumberOfJobs: self.jobSplittingPerFiles()
274 >        if self.selectNoInput:
275 >            if self.pset == None: #CarlosDaniele
276 >                self.jobSplittingForScript()
277 >            else:
278 >                self.jobSplittingNoInput()
279          else:
280 <            msg = 'Don\'t know how to split...'
234 <            raise CrabException(msg)
280 >            self.jobSplittingByBlocks(blockSites)
281  
282          # modify Pset
283 <        try:
284 <            if (self.datasetPath): # standard job
285 <                #self.PsetEdit.maxEvent(self.eventsPerJob)
286 <                # always process all events in a file
287 <                self.PsetEdit.maxEvent("-1")
288 <                self.PsetEdit.inputModule("INPUT")
289 <
290 <            else:  # pythia like job
291 <                self.PsetEdit.maxEvent(self.eventsPerJob)
292 <                if (self.sourceSeed) :
293 <                    self.PsetEdit.pythiaSeed("INPUT")
294 <                    if (self.sourceSeedVtx) :
295 <                        self.PsetEdit.pythiaSeedVtx("INPUTVTX")
296 <            self.PsetEdit.psetWriter(self.configFilename())
297 <        except:
298 <            msg='Error while manipuliating ParameterSet: exiting...'
299 <            raise CrabException(msg)
283 >        if self.pset != None: #CarlosDaniele
284 >            try:
285 >                if (self.datasetPath): # standard job
286 >                    # allow processing a fraction of events in a file
287 >                    PsetEdit.inputModule("INPUT")
288 >                    PsetEdit.maxEvent("INPUTMAXEVENTS")
289 >                    PsetEdit.skipEvent("INPUTSKIPEVENTS")
290 >                else:  # pythia like job
291 >                    PsetEdit.maxEvent(self.eventsPerJob)
292 >                    if (self.firstRun):
293 >                        PsetEdit.pythiaFirstRun("INPUTFIRSTRUN")  #First Run
294 >                    if (self.sourceSeed) :
295 >                        PsetEdit.pythiaSeed("INPUT")
296 >                        if (self.sourceSeedVtx) :
297 >                            PsetEdit.vtxSeed("INPUTVTX")
298 >                        if (self.sourceSeedG4) :
299 >                            self.PsetEdit.g4Seed("INPUTG4")
300 >                        if (self.sourceSeedMix) :
301 >                            self.PsetEdit.mixSeed("INPUTMIX")
302 >                # add FrameworkJobReport to parameter-set
303 >                PsetEdit.addCrabFJR(self.fjrFileName)
304 >                PsetEdit.psetWriter(self.configFilename())
305 >            except:
306 >                msg='Error while manipulating ParameterSet: exiting...'
307 >                raise CrabException(msg)
308  
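The parameter set is now written with placeholder tokens (INPUT, INPUTMAXEVENTS, INPUTSKIPEVENTS, plus the first-run and seed markers) that are substituted per job later on. An illustrative sketch of that substitution step, not the actual PsetManipulator code, with made-up per-job values:

    def fill_placeholders(pset_text, per_job_values):
        # e.g. per_job_values = {'INPUT': '"file:A.root","file:B.root"',
        #                        'INPUTMAXEVENTS': '500', 'INPUTSKIPEVENTS': '250'}
        for token, value in per_job_values.items():
            pset_text = pset_text.replace(token, value)
        return pset_text
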
309      def DataDiscoveryAndLocation(self, cfg_params):
310  
311 +        import DataDiscovery
312 +        import DataDiscovery_DBS2
313 +        import DataLocation
314          common.logger.debug(10,"CMSSW::DataDiscoveryAndLocation()")
315  
316          datasetPath=self.datasetPath
317  
261        ## TODO
262        dataTiersList = ""
263        dataTiers = dataTiersList.split(',')
264
318          ## Contact the DBS
319 +        common.logger.message("Contacting Data Discovery Services ...")
320          try:
321 <            self.pubdata=DataDiscovery_EDM.DataDiscovery_EDM(datasetPath, dataTiers, cfg_params)
321 >
322 >            if self.use_dbs_1 == 1 :
323 >                self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params)
324 >            else :
325 >                self.pubdata=DataDiscovery_DBS2.DataDiscovery_DBS2(datasetPath, cfg_params)
326              self.pubdata.fetchDBSInfo()
327  
328 <        except DataDiscovery_EDM.NotExistingDatasetError, ex :
328 >        except DataDiscovery.NotExistingDatasetError, ex :
329              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
330              raise CrabException(msg)
331 <
332 <        except DataDiscovery_EDM.NoDataTierinProvenanceError, ex :
331 >        except DataDiscovery.NoDataTierinProvenanceError, ex :
332 >            msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
333 >            raise CrabException(msg)
334 >        except DataDiscovery.DataDiscoveryError, ex:
335 >            msg = 'ERROR ***: failed Data Discovery in DBS :  %s'%ex.getErrorMessage()
336 >            raise CrabException(msg)
337 >        except DataDiscovery_DBS2.NotExistingDatasetError_DBS2, ex :
338 >            msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
339 >            raise CrabException(msg)
340 >        except DataDiscovery_DBS2.NoDataTierinProvenanceError_DBS2, ex :
341              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
342              raise CrabException(msg)
343 <        except DataDiscovery_EDM.DataDiscoveryError, ex:
344 <            msg = 'ERROR ***: failed Data Discovery in DBS  %s'%ex.getErrorMessage()
343 >        except DataDiscovery_DBS2.DataDiscoveryError_DBS2, ex:
344 >            msg = 'ERROR ***: failed Data Discovery in DBS :  %s'%ex.getErrorMessage()
345              raise CrabException(msg)
346  
347 <        ## get list of all required data in the form of dbs paths  (dbs path = /dataset/datatier/owner)
348 <        ## self.DBSPaths=self.pubdata.getDBSPaths()
349 <        common.logger.message("Required data are :"+self.datasetPath)
284 <
285 <        filesbyblock=self.pubdata.getFiles()
286 < #        print filesbyblock
287 <        self.AllInputFiles=filesbyblock.values()
288 <        self.files = self.AllInputFiles        
347 >        self.filesbyblock=self.pubdata.getFiles()
348 >        self.eventsbyblock=self.pubdata.getEventsPerBlock()
349 >        self.eventsbyfile=self.pubdata.getEventsPerFile()
350  
351          ## get max number of events
291        #common.logger.debug(10,"number of events for primary fileblocks %i"%self.pubdata.getMaxEvents())
352          self.maxEvents=self.pubdata.getMaxEvents() ##  self.maxEvents used in Creator.py
293        common.logger.message("\nThe number of available events is %s"%self.maxEvents)
353  
354          ## Contact the DLS and build a list of sites hosting the fileblocks
355          try:
356 <            dataloc=DataLocation_EDM.DataLocation_EDM(filesbyblock.keys(),cfg_params)
356 >            dataloc=DataLocation.DataLocation(self.filesbyblock.keys(),cfg_params)
357              dataloc.fetchDLSInfo()
358 <        except DataLocation_EDM.DataLocationError , ex:
358 >        except DataLocation.DataLocationError , ex:
359              msg = 'ERROR ***: failed Data Location in DLS \n %s '%ex.getErrorMessage()
360              raise CrabException(msg)
361          
303        allsites=dataloc.getSites()
304        common.logger.debug(5,"sites are %s"%allsites)
305        sites=self.checkBlackList(allsites)
306        common.logger.debug(5,"sites are (after black list) %s"%sites)
307        sites=self.checkWhiteList(sites)
308        common.logger.debug(5,"sites are (after white list) %s"%sites)
362  
363 <        if len(sites)==0:
364 <            msg = 'No sites hosting all the needed data! Exiting... '
365 <            raise CrabException(msg)
363 >        sites = dataloc.getSites()
364 >        allSites = []
365 >        listSites = sites.values()
366 >        for listSite in listSites:
367 >            for oneSite in listSite:
368 >                allSites.append(oneSite)
369 >        allSites = self.uniquelist(allSites)
370  
371 <        common.logger.message("List of Sites ("+str(len(sites))+") hosting the data : "+str(sites))
372 <        common.logger.debug(6, "List of Sites: "+str(sites))
373 <        common.analisys_common_info['sites']=sites    ## used in SchedulerEdg.py in createSchScript
374 <        self.setParam_('TargetCE', ','.join(sites))
318 <        return
371 >        # screen output
372 >        common.logger.message("Requested dataset: " + datasetPath + " has " + str(self.maxEvents) + " events in " + str(len(self.filesbyblock.keys())) + " blocks.\n")
373 >
374 >        return sites
375      
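DataDiscoveryAndLocation now returns the per-block site dictionary from the DLS lookup, and the allSites list built above is just the de-duplicated union of its values. Roughly, assuming uniquelist keeps the first occurrence of each site:

    def all_sites(block_sites):
        # block_sites: {fileblock: [site, ...]} as returned by dataloc.getSites()
        seen, flat = set(), []
        for sites in block_sites.values():
            for site in sites:
                if site not in seen:
                    seen.add(site)
                    flat.append(site)
        return flat
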
376 <    def jobSplittingPerFiles(self):
321 <        """
322 <        Perform job splitting based on number of files to be accessed per job
376 >    def jobSplittingByBlocks(self, blockSites):
377          """
378 <        common.logger.debug(5,'Splitting per input files')
379 <        common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
380 <        common.logger.message('Available '+str(self.maxEvents)+' events in total ')
381 <        common.logger.message('Required '+str(self.filesPerJob)+' files per job ')
382 <        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
383 <        common.logger.message('Required '+str(self.eventsPerJob)+' events per job')
384 <
385 <        ## if asked to process all events, do it
386 <        if self.total_number_of_events == -1:
387 <            self.total_number_of_events=self.maxEvents
378 >        Perform job splitting. Jobs run over an integer number of files
379 >        and no more than one block.
380 >        ARGUMENT: blockSites: dictionary with blocks as keys and list of host sites as values
381 >        REQUIRES: self.selectTotalNumberEvents, self.selectEventsPerJob, self.selectNumberofJobs,
382 >                  self.total_number_of_events, self.eventsPerJob, self.theNumberOfJobs,
383 >                  self.maxEvents, self.filesbyblock
384 >        SETS: self.jobDestination - Site destination(s) for each job (a list of lists)
385 >              self.total_number_of_jobs - Total # of jobs
386 >              self.list_of_args - File(s) job will run on (a list of lists)
387 >        """
388 >
389 >        # ---- Handle the possible job splitting configurations ---- #
390 >        if (self.selectTotalNumberEvents):
391 >            totalEventsRequested = self.total_number_of_events
392 >        if (self.selectEventsPerJob):
393 >            eventsPerJobRequested = self.eventsPerJob
394 >            if (self.selectNumberOfJobs):
395 >                totalEventsRequested = self.theNumberOfJobs * self.eventsPerJob
396 >
397 >        # If user requested all the events in the dataset
398 >        if (totalEventsRequested == -1):
399 >            eventsRemaining=self.maxEvents
400 >        # If user requested more events than are in the dataset
401 >        elif (totalEventsRequested > self.maxEvents):
402 >            eventsRemaining = self.maxEvents
403 >            common.logger.message("Requested "+str(self.total_number_of_events)+ " events, but only "+str(self.maxEvents)+" events are available.")
404 >        # If user requested less events than are in the dataset
405          else:
406 <            if self.total_number_of_events>self.maxEvents:
336 <                common.logger.message("Asked "+str(self.total_number_of_events)+" but only "+str(self.maxEvents)+" available.")
337 <                self.total_number_of_events=self.maxEvents
338 <            pass
406 >            eventsRemaining = totalEventsRequested
407  
408 <        ## TODO: SL need to have (from DBS) a detailed list of how many events per each file
409 <        n_tot_files = (len(self.files[0]))
410 <        ## SL: this is wrong if the files have different number of events
343 <        evPerFile = int(self.maxEvents)/n_tot_files
344 <
345 <        common.logger.debug(5,'Events per File '+str(evPerFile))
346 <
347 <        ## compute job splitting parameters: filesPerJob, eventsPerJob and theNumberOfJobs
348 <        if self.selectFilesPerJob:
349 <            ## user define files per event.
350 <            filesPerJob = self.filesPerJob
351 <            eventsPerJob = filesPerJob*evPerFile
352 <            theNumberOfJobs = int(self.total_number_of_events*1./eventsPerJob)
353 <            check = int(self.total_number_of_events) - (theNumberOfJobs*eventsPerJob)
354 <            if check > 0:
355 <                theNumberOfJobs +=1
356 <                filesLastJob = int(check*1./evPerFile+0.5)
357 <                common.logger.message('Warning: last job will be created with '+str(check)+' files')
358 <            else:
359 <                filesLastJob = filesPerJob
408 >        # If user requested more events per job than are in the dataset
409 >        if (self.selectEventsPerJob and eventsPerJobRequested > self.maxEvents):
410 >            eventsPerJobRequested = self.maxEvents
411  
412 <        elif self.selectNumberOfJobs:
413 <            ## User select the number of jobs: last might be bigger to match request of events
363 <            theNumberOfJobs =  self.theNumberOfJobs
364 <
365 <            eventsPerJob = self.total_number_of_events/theNumberOfJobs
366 <            filesPerJob = int(eventsPerJob/evPerFile)
367 <            if (filesPerJob==0) : filesPerJob=1
368 <            check = int(self.total_number_of_events) - (int(theNumberOfJobs)*filesPerJob*evPerFile)
369 <            if not check == 0:
370 <                if check<0:
371 <                    missingFiles = int(check/evPerFile)
372 <                    additionalJobs = int(missingFiles/filesPerJob)
373 <                    #print missingFiles, additionalJobs
374 <                    theNumberOfJobs+=additionalJobs
375 <                    common.logger.message('Warning: will create only '+str(theNumberOfJobs)+' jobs')
376 <                    check = int(self.total_number_of_events) - (int(theNumberOfJobs)*filesPerJob*evPerFile)
377 <                    
378 <                if check >0 :
379 <                    filesLastJob = filesPerJob+int(check*1./evPerFile+0.5)
380 <                    common.logger.message('Warning: last job will be created with '+str(filesLastJob*evPerFile)+' events')
381 <                else:
382 <                    filesLastJob = filesPerJob
383 <            else:
384 <                filesLastJob = filesPerJob
385 <        elif self.selectEventsPerJob:
386 <            # SL case if asked events per job
387 <            ## estimate the number of files per job to match the user requirement
388 <            filesPerJob = int(float(self.eventsPerJob)/float(evPerFile))
389 <            if filesPerJob==0: filesPerJob=1
390 <            common.logger.debug(5,"filesPerJob "+str(filesPerJob))
391 <            if (filesPerJob==0): filesPerJob=1
392 <            eventsPerJob=filesPerJob*evPerFile
393 <            theNumberOfJobs = int(self.total_number_of_events)/int(eventsPerJob)
394 <            check = int(self.total_number_of_events) - (int(theNumberOfJobs)*eventsPerJob)
395 <            if not check == 0:
396 <                missingFiles = int(check/evPerFile)
397 <                additionalJobs = int(missingFiles/filesPerJob)
398 <                if ( additionalJobs>0) : theNumberOfJobs+=additionalJobs
399 <                check = int(self.total_number_of_events) - (int(theNumberOfJobs)*eventsPerJob)
400 <                if not check == 0:
401 <                    if (check <0 ):
402 <                        filesLastJob = filesPerJob+int(check*1./evPerFile-0.5)
403 <                    else:
404 <                        theNumberOfJobs+=1
405 <                        filesLastJob = int(check*1./evPerFile+0.5)
412 >        # For user info at end
413 >        totalEventCount = 0
414  
415 <                    common.logger.message('Warning: last job will be created with '+str(filesLastJob*evPerFile)+' events')
416 <                else:
409 <                    filesLastJob = filesPerJob
410 <            else:
411 <                filesLastJob = filesPerJob
412 <        
413 <        self.total_number_of_jobs = theNumberOfJobs
415 >        if (self.selectTotalNumberEvents and self.selectNumberOfJobs):
416 >            eventsPerJobRequested = int(eventsRemaining/self.theNumberOfJobs)
417  
418 <        totalEventsToBeUsed=theNumberOfJobs*filesPerJob*evPerFile
419 <        if not check == 0:
417 <        #    print (theNumberOfJobs-1)*filesPerJob*evPerFile,filesLastJob*evPerFile
418 <            totalEventsToBeUsed=(theNumberOfJobs-1)*filesPerJob*evPerFile+filesLastJob*evPerFile
418 >        if (self.selectNumberOfJobs):
419 >            common.logger.message("May not create the exact number_of_jobs requested.")
420  
421 <        common.logger.message(str(self.total_number_of_jobs)+' jobs will be created, each for '+str(filesPerJob*evPerFile)+' events, for a total of '+str(totalEventsToBeUsed)+' events')
421 >        if ( self.ncjobs == 'all' ) :
422 >            totalNumberOfJobs = 999999999
423 >        else :
424 >            totalNumberOfJobs = self.ncjobs
425 >            
426  
427 <        totalFilesToBeUsed=filesPerJob*(theNumberOfJobs-1)+filesLastJob
427 >        blocks = blockSites.keys()
428 >        blockCount = 0
429 >        # Backup variable in case self.maxEvents counted events in a non-included block
430 >        numBlocksInDataset = len(blocks)
431  
432 <        ## set job arguments (files)
432 >        jobCount = 0
433          list_of_lists = []
434 <        lastFile=0
435 <        for i in range(0, int(totalFilesToBeUsed), filesPerJob)[:-1]:
436 <            parString = "\\{"
434 >
435 >        # list tracking which jobs belong to which block
436 >        jobsOfBlock = {}
437 >
438 >        # ---- Iterate over the blocks in the dataset until ---- #
439 >        # ---- we've met the requested total # of events    ---- #
440 >        while ( (eventsRemaining > 0) and (blockCount < numBlocksInDataset) and (jobCount < totalNumberOfJobs)):
441 >            block = blocks[blockCount]
442 >            blockCount += 1
443              
444 <            lastFile=i+filesPerJob
445 <            params = self.files[0][i: lastFile]
446 <            for i in range(len(params) - 1):
433 <                parString += '\\\"' + params[i] + '\\\"\,'
444 >            if self.eventsbyblock.has_key(block) :
445 >                numEventsInBlock = self.eventsbyblock[block]
446 >                common.logger.debug(5,'Events in Block File '+str(numEventsInBlock))
447              
448 <            parString += '\\\"' + params[len(params) - 1] + '\\\"\\}'
449 <            list_of_lists.append([parString])
450 <            pass
451 <
452 <        ## last job
453 <        parString = "\\{"
454 <        
455 <        params = self.files[0][lastFile: lastFile+filesLastJob]
456 <        for i in range(len(params) - 1):
457 <            parString += '\\\"' + params[i] + '\\\"\,'
448 >                files = self.filesbyblock[block]
449 >                numFilesInBlock = len(files)
450 >                if (numFilesInBlock <= 0):
451 >                    continue
452 >                fileCount = 0
453 >
454 >                # ---- New block => New job ---- #
455 >                parString = "\\{"
456 >                # counter for number of events in files currently worked on
457 >                filesEventCount = 0
458 >                # flag if next while loop should touch new file
459 >                newFile = 1
460 >                # job event counter
461 >                jobSkipEventCount = 0
462 >            
463 >                # ---- Iterate over the files in the block until we've met the requested ---- #
464 >                # ---- total # of events or we've gone over all the files in this block  ---- #
465 >                while ( (eventsRemaining > 0) and (fileCount < numFilesInBlock) and (jobCount < totalNumberOfJobs) ):
466 >                    file = files[fileCount]
467 >                    if newFile :
468 >                        try:
469 >                            numEventsInFile = self.eventsbyfile[file]
470 >                            common.logger.debug(6, "File "+str(file)+" has "+str(numEventsInFile)+" events")
471 >                            # increase filesEventCount
472 >                            filesEventCount += numEventsInFile
473 >                            # Add file to current job
474 >                            parString += '\\\"' + file + '\\\"\,'
475 >                            newFile = 0
476 >                        except KeyError:
477 >                            common.logger.message("File "+str(file)+" has unknown number of events: skipping")
478 >                        
479 >
480 >                    # if less events in file remain than eventsPerJobRequested
481 >                    if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested ) :
482 >                        # if last file in block
483 >                        if ( fileCount == numFilesInBlock-1 ) :
484 >                            # end job using last file, use remaining events in block
485 >                            # close job and touch new file
486 >                            fullString = parString[:-2]
487 >                            fullString += '\\}'
488 >                            list_of_lists.append([fullString,str(-1),str(jobSkipEventCount)])
489 >                            common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(filesEventCount - jobSkipEventCount)+" events (last file in block).")
490 >                            self.jobDestination.append(blockSites[block])
491 >                            common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
492 >                            # fill jobs of block dictionary
493 >                            if block in jobsOfBlock.keys() :
494 >                                jobsOfBlock[block].append(jobCount+1)
495 >                            else:
496 >                                jobsOfBlock[block] = [jobCount+1]
497 >                            # reset counter
498 >                            jobCount = jobCount + 1
499 >                            totalEventCount = totalEventCount + filesEventCount - jobSkipEventCount
500 >                            eventsRemaining = eventsRemaining - filesEventCount + jobSkipEventCount
501 >                            jobSkipEventCount = 0
502 >                            # reset file
503 >                            parString = "\\{"
504 >                            filesEventCount = 0
505 >                            newFile = 1
506 >                            fileCount += 1
507 >                        else :
508 >                            # go to next file
509 >                            newFile = 1
510 >                            fileCount += 1
511 >                    # if events in file equal to eventsPerJobRequested
512 >                    elif ( filesEventCount - jobSkipEventCount == eventsPerJobRequested ) :
513 >                        # close job and touch new file
514 >                        fullString = parString[:-2]
515 >                        fullString += '\\}'
516 >                        list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
517 >                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
518 >                        self.jobDestination.append(blockSites[block])
519 >                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
520 >                        if block in jobsOfBlock.keys() :
521 >                            jobsOfBlock[block].append(jobCount+1)
522 >                        else:
523 >                            jobsOfBlock[block] = [jobCount+1]
524 >                        # reset counter
525 >                        jobCount = jobCount + 1
526 >                        totalEventCount = totalEventCount + eventsPerJobRequested
527 >                        eventsRemaining = eventsRemaining - eventsPerJobRequested
528 >                        jobSkipEventCount = 0
529 >                        # reset file
530 >                        parString = "\\{"
531 >                        filesEventCount = 0
532 >                        newFile = 1
533 >                        fileCount += 1
534 >                        
535 >                    # if more events in file remain than eventsPerJobRequested
536 >                    else :
537 >                        # close job but don't touch new file
538 >                        fullString = parString[:-2]
539 >                        fullString += '\\}'
540 >                        list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
541 >                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
542 >                        self.jobDestination.append(blockSites[block])
543 >                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
544 >                        if block in jobsOfBlock.keys() :
545 >                            jobsOfBlock[block].append(jobCount+1)
546 >                        else:
547 >                            jobsOfBlock[block] = [jobCount+1]
548 >                        # increase counter
549 >                        jobCount = jobCount + 1
550 >                        totalEventCount = totalEventCount + eventsPerJobRequested
551 >                        eventsRemaining = eventsRemaining - eventsPerJobRequested
552 >                        # calculate skip events for last file
553 >                        # use filesEventCount (contains several files), jobSkipEventCount and eventsPerJobRequest
554 >                        jobSkipEventCount = eventsPerJobRequested - (filesEventCount - jobSkipEventCount - self.eventsbyfile[file])
555 >                        # remove all but the last file
556 >                        filesEventCount = self.eventsbyfile[file]
557 >                        parString = "\\{"
558 >                        parString += '\\\"' + file + '\\\"\,'
559 >                    pass # END if
560 >                pass # END while (iterate over files in the block)
561 >        pass # END while (iterate over blocks in the dataset)
562 >        self.ncjobs = self.total_number_of_jobs = jobCount
563 >        if (eventsRemaining > 0 and jobCount < totalNumberOfJobs ):
564 >            common.logger.message("Could not run on all requested events because some blocks not hosted at allowed sites.")
565 >        common.logger.message(str(jobCount)+" job(s) can run on "+str(totalEventCount)+" events.\n")
566          
567 <        parString += '\\\"' + params[len(params) - 1] + '\\\"\\}'
568 <        list_of_lists.append([parString])
569 <        pass
567 >        # screen output
568 >        screenOutput = "List of jobs and available destination sites:\n\n"
569 >
570 >        blockCounter = 0
571 >        for block in jobsOfBlock.keys():
572 >            blockCounter += 1
573 >            screenOutput += "Block %5i: jobs %20s: sites: %s\n" % (blockCounter,spanRanges(jobsOfBlock[block]),','.join(blockSites[block]))
574 >
575 >        common.logger.message(screenOutput)
576  
577          self.list_of_args = list_of_lists
451        # print self.list_of_args[0]
578          return
579  
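The core of jobSplittingByBlocks: within one block, files are accumulated until roughly eventsPerJobRequested events are available, each emitted job records its file list, a max-event count, and a skip-event offset, and no job spans two blocks. A simplified sketch of that per-block loop (the remaining-event budget, the job cap, the site bookkeeping and the unknown-event-count case are omitted; -1 means "use whatever is left in the block"):

    def split_block(files_with_events, events_per_job):
        """files_with_events: list of (file_name, n_events) for one block.
        Returns a list of (files, max_events, skip_events) job descriptions."""
        jobs, current, acc, skip = [], [], 0, 0
        for i, (fname, nev) in enumerate(files_with_events):
            current.append(fname)
            acc += nev
            # emit as many full jobs as this file makes possible
            while acc - skip > events_per_job:
                jobs.append((list(current), events_per_job, skip))
                # the next job reuses only the last file, skipping the events
                # that the previous job already consumed from it
                skip = events_per_job - (acc - skip - nev)
                current, acc = [fname], nev
            if acc - skip == events_per_job:
                jobs.append((list(current), events_per_job, skip))
                current, acc, skip = [], 0, 0
            elif i == len(files_with_events) - 1 and acc - skip > 0:
                # last file of the block: close the job on whatever remains
                jobs.append((list(current), -1, skip))
        return jobs

For example, two 300-event files A and B split with events_per_job=250 give three jobs: ([A], 250, skip 0), ([A, B], 250, skip 250) and ([B], -1, skip 200).
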
580      def jobSplittingNoInput(self):
# Line 465 | Line 591 | class Cmssw(JobType):
591              raise CrabException(msg)
592  
593          if (self.selectEventsPerJob):
594 <            self.total_number_of_jobs = int(self.total_number_of_events/self.eventsPerJob)
594 >            if (self.selectTotalNumberEvents):
595 >                self.total_number_of_jobs = int(self.total_number_of_events/self.eventsPerJob)
596 >            elif(self.selectNumberOfJobs) :  
597 >                self.total_number_of_jobs =self.theNumberOfJobs
598 >                self.total_number_of_events =int(self.theNumberOfJobs*self.eventsPerJob)
599 >
600          elif (self.selectNumberOfJobs) :
601              self.total_number_of_jobs = self.theNumberOfJobs
602              self.eventsPerJob = int(self.total_number_of_events/self.total_number_of_jobs)
603 <
603 >
604          common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
605  
606          # is there any remainder?
# Line 477 | Line 608 | class Cmssw(JobType):
608  
609          common.logger.debug(5,'Check  '+str(check))
610  
611 <        common.logger.message(str(self.total_number_of_jobs)+' jobs will be created, each for '+str(self.eventsPerJob)+' for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
611 >        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created, each for '+str(self.eventsPerJob)+' for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
612          if check > 0:
613 <            common.logger.message('Warning: asked '+str(self.total_number_of_events)+' but will do only '+str(int(self.total_number_of_jobs)*self.eventsPerJob))
483 <
613 >            common.logger.message('Warning: asked '+str(self.total_number_of_events)+' but can do only '+str(int(self.total_number_of_jobs)*self.eventsPerJob))
614  
615          # argument is seed number.$i
616          self.list_of_args = []
617          for i in range(self.total_number_of_jobs):
618 +            ## Since there is no input, any site is good
619 +           # self.jobDestination.append(["Any"])
620 +            self.jobDestination.append([""]) #must be empty to write correctly the xml
621 +            args=[]
622 +            if (self.firstRun):
623 +                    ## pythia first run
624 +                #self.list_of_args.append([(str(self.firstRun)+str(i))])
625 +                args.append(str(self.firstRun)+str(i))
626 +            else:
627 +                ## no first run
628 +                #self.list_of_args.append([str(i)])
629 +                args.append(str(i))
630              if (self.sourceSeed):
631 +                args.append(str(self.sourceSeed)+str(i))
632                  if (self.sourceSeedVtx):
633 <                    ## pythia + vtx random seed
634 <                    self.list_of_args.append([
635 <                                              str(self.sourceSeed)+str(i),
636 <                                              str(self.sourceSeedVtx)+str(i)
637 <                                              ])
638 <                else:
639 <                    ## only pythia random seed
640 <                    self.list_of_args.append([(str(self.sourceSeed)+str(i))])
641 <            else:
642 <                ## no random seed
643 <                self.list_of_args.append([str(i)])
644 <        #print self.list_of_args
633 >                    ## + vtx random seed
634 >                    args.append(str(self.sourceSeedVtx)+str(i))
635 >                if (self.sourceSeedG4):
636 >                    ## + G4 random seed
637 >                    args.append(str(self.sourceSeedG4)+str(i))
638 >                if (self.sourceSeedMix):    
639 >                    ## + Mix random seed
640 >                    args.append(str(self.sourceSeedMix)+str(i))
641 >                pass
642 >            pass
643 >            self.list_of_args.append(args)
644 >        pass
645 >            
646 >        # print self.list_of_args
647  
648          return
649  
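For no-input (generation) jobs each argument list is built from the optional first run and the configured seeds, each made job-specific by appending the job index as a string (str(seed)+str(i), a concatenation rather than an addition). A compact sketch of that construction:

    def no_input_args(n_jobs, first_run=None, pythia=None, vtx=None, g4=None, mix=None):
        all_args = []
        for i in range(n_jobs):
            # first argument: firstRun+index if given, otherwise just the job index
            args = [str(first_run) + str(i) if first_run is not None else str(i)]
            if pythia is not None:
                args.append(str(pythia) + str(i))
                for seed in (vtx, g4, mix):          # optional extra seeds
                    if seed is not None:
                        args.append(str(seed) + str(i))
            all_args.append(args)
        return all_args
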
650 +
651 +    def jobSplittingForScript(self):#CarlosDaniele
652 +        """
653 +        Perform job splitting based on number of job
654 +        """
655 +        common.logger.debug(5,'Splitting per job')
656 +        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
657 +
658 +        self.total_number_of_jobs = self.theNumberOfJobs
659 +
660 +        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
661 +
662 +        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created')
663 +
664 +        # argument is seed number.$i
665 +        self.list_of_args = []
666 +        for i in range(self.total_number_of_jobs):
667 +            ## Since there is no input, any site is good
668 +           # self.jobDestination.append(["Any"])
669 +            self.jobDestination.append([""])
670 +            ## no random seed
671 +            self.list_of_args.append([str(i)])
672 +        return
673 +
674      def split(self, jobParams):
675  
676          common.jobDB.load()
# Line 517 | Line 686 | class Cmssw(JobType):
686              # print str(arglist[job])
687              # print jobParams[job]
688              common.jobDB.setArguments(job, jobParams[job])
689 +            common.logger.debug(5,"Job "+str(job)+" Destination: "+str(self.jobDestination[job]))
690 +            common.jobDB.setDestination(job, self.jobDestination[job])
691  
692          common.jobDB.save()
693          return
# Line 531 | Line 702 | class Cmssw(JobType):
702          # Fabio
703          return self.total_number_of_jobs
704  
534    def checkBlackList(self, allSites):
535        if len(self.reCEBlackList)==0: return allSites
536        sites = []
537        for site in allSites:
538            common.logger.debug(10,'Site '+site)
539            good=1
540            for re in self.reCEBlackList:
541                if re.search(site):
542                    common.logger.message('CE in black list, skipping site '+site)
543                    good=0
544                pass
545            if good: sites.append(site)
546        if len(sites) == 0:
547            common.logger.debug(3,"No sites found after BlackList")
548        return sites
549
550    def checkWhiteList(self, allSites):
551
552        if len(self.reCEWhiteList)==0: return allSites
553        sites = []
554        for site in allSites:
555            good=0
556            for re in self.reCEWhiteList:
557                if re.search(site):
558                    common.logger.debug(5,'CE in white list, adding site '+site)
559                    good=1
560                if not good: continue
561                sites.append(site)
562        if len(sites) == 0:
563            common.logger.message("No sites found after WhiteList\n")
564        else:
565            common.logger.debug(5,"Selected sites via WhiteList are "+str(sites)+"\n")
566        return sites
567
705      def getTarBall(self, exe):
706          """
707          Return the TarBall with lib and exe
708          """
709          
710          # if it exist, just return it
711 <        self.tgzNameWithPath = common.work_space.shareDir()+self.tgz_name
711 >        #
712 >        # Marco. Let's start to use relative path for Boss XML files
713 >        #
714 >        self.tgzNameWithPath = common.work_space.pathForTgz()+'share/'+self.tgz_name
715          if os.path.exists(self.tgzNameWithPath):
716              return self.tgzNameWithPath
717  
# Line 585 | Line 725 | class Cmssw(JobType):
725          # First of all declare the user Scram area
726          swArea = self.scram.getSWArea_()
727          #print "swArea = ", swArea
728 <        swVersion = self.scram.getSWVersion()
729 <        #print "swVersion = ", swVersion
728 >        # swVersion = self.scram.getSWVersion()
729 >        # print "swVersion = ", swVersion
730          swReleaseTop = self.scram.getReleaseTop_()
731          #print "swReleaseTop = ", swReleaseTop
732          
# Line 594 | Line 734 | class Cmssw(JobType):
734          if swReleaseTop == '' or swArea == swReleaseTop:
735              return
736  
737 <        filesToBeTarred = []
738 <        ## First find the executable
739 <        if (self.executable != ''):
740 <            exeWithPath = self.scram.findFile_(executable)
741 < #           print exeWithPath
742 <            if ( not exeWithPath ):
743 <                raise CrabException('User executable '+executable+' not found')
744 <
745 <            ## then check if it's private or not
746 <            if exeWithPath.find(swReleaseTop) == -1:
747 <                # the exe is private, so we must ship
748 <                common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
749 <                path = swArea+'/'
750 <                exe = string.replace(exeWithPath, path,'')
751 <                filesToBeTarred.append(exe)
752 <                pass
753 <            else:
754 <                # the exe is from release, we'll find it on WN
755 <                pass
756 <
757 <        ## Now get the libraries: only those in local working area
758 <        libDir = 'lib'
759 <        lib = swArea+'/' +libDir
760 <        common.logger.debug(5,"lib "+lib+" to be tarred")
761 <        if os.path.exists(lib):
762 <            filesToBeTarred.append(libDir)
763 <
764 <        ## Now check if module dir is present
765 <        moduleDir = 'module'
766 <        if os.path.isdir(swArea+'/'+moduleDir):
767 <            filesToBeTarred.append(moduleDir)
768 <
769 <        ## Now check if the Data dir is present
770 <        dataDir = 'src/Data/'
771 <        if os.path.isdir(swArea+'/'+dataDir):
772 <            filesToBeTarred.append(dataDir)
773 <
774 <        ## Create the tar-ball
775 <        if len(filesToBeTarred)>0:
776 <            cwd = os.getcwd()
777 <            os.chdir(swArea)
778 <            tarcmd = 'tar zcvf ' + self.tgzNameWithPath + ' '
779 <            for line in filesToBeTarred:
780 <                tarcmd = tarcmd + line + ' '
781 <            cout = runCommand(tarcmd)
782 <            if not cout:
783 <                raise CrabException('Could not create tar-ball')
784 <            os.chdir(cwd)
785 <        else:
786 <            common.logger.debug(5,"No files to be to be tarred")
737 >        import tarfile
738 >        try: # create tar ball
739 >            tar = tarfile.open(self.tgzNameWithPath, "w:gz")
740 >            ## First find the executable
741 >            if (self.executable != ''):
742 >                exeWithPath = self.scram.findFile_(executable)
743 >                if ( not exeWithPath ):
744 >                    raise CrabException('User executable '+executable+' not found')
745 >    
746 >                ## then check if it's private or not
747 >                if exeWithPath.find(swReleaseTop) == -1:
748 >                    # the exe is private, so we must ship
749 >                    common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
750 >                    path = swArea+'/'
751 >                    # distinguish case when script is in user project area or given by full path somewhere else
752 >                    if exeWithPath.find(path) >= 0 :
753 >                        exe = string.replace(exeWithPath, path,'')
754 >                        tar.add(path+exe,os.path.basename(executable))
755 >                    else :
756 >                        tar.add(exeWithPath,os.path.basename(executable))
757 >                    pass
758 >                else:
759 >                    # the exe is from release, we'll find it on WN
760 >                    pass
761 >    
762 >            ## Now get the libraries: only those in local working area
763 >            libDir = 'lib'
764 >            lib = swArea+'/' +libDir
765 >            common.logger.debug(5,"lib "+lib+" to be tarred")
766 >            if os.path.exists(lib):
767 >                tar.add(lib,libDir)
768 >    
769 >            ## Now check if module dir is present
770 >            moduleDir = 'module'
771 >            module = swArea + '/' + moduleDir
772 >            if os.path.isdir(module):
773 >                tar.add(module,moduleDir)
774 >
775 >            ## Now check if any data dir(s) is present
776 >            swAreaLen=len(swArea)
777 >            for root, dirs, files in os.walk(swArea):
778 >                if "data" in dirs:
779 >                    common.logger.debug(5,"data "+root+"/data"+" to be tarred")
780 >                    tar.add(root+"/data",root[swAreaLen:]+"/data")
781 >
782 >            ## Add ProdAgent dir to tar
783 >            paDir = 'ProdAgentApi'
784 >            pa = os.environ['CRABDIR'] + '/' + 'ProdAgentApi'
785 >            if os.path.isdir(pa):
786 >                tar.add(pa,paDir)
787 >
788 >            ### FEDE FOR DBS PUBLICATION
789 >            ## Add PRODCOMMON dir to tar
790 >            prodcommonDir = 'ProdCommon'
791 >            prodcommonPath = os.environ['CRABDIR'] + '/' + 'ProdCommon'
792 >            if os.path.isdir(prodcommonPath):
793 >                tar.add(prodcommonPath,prodcommonDir)
794 >            #############################    
795 >        
796 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
797 >            tar.close()
798 >        except :
799 >            raise CrabException('Could not create tar-ball')
800 >
801 >        ## check for tarball size
802 >        tarballinfo = os.stat(self.tgzNameWithPath)
803 >        if ( tarballinfo.st_size > self.MaxTarBallSize*1024*1024 ) :
804 >            raise CrabException('Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) + ' MB input sandbox limit and not supported by the used GRID submission system. Please make sure that no unnecessary files are in all data directories in your local CMSSW project area as they are automatically packed into the input sandbox.')
805 >
806 >        ## create tar-ball with ML stuff
807 >        self.MLtgzfile =  common.work_space.pathForTgz()+'share/MLfiles.tgz'
808 >        try:
809 >            tar = tarfile.open(self.MLtgzfile, "w:gz")
810 >            path=os.environ['CRABDIR'] + '/python/'
811 >            for file in ['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py', 'parseCrabFjr.py']:
812 >                tar.add(path+file,file)
813 >            common.logger.debug(5,"Files added to "+self.MLtgzfile+" : "+str(tar.getnames()))
814 >            tar.close()
815 >        except :
816 >            raise CrabException('Could not create ML files tar-ball')
817          
818          return
819          
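getTarBall now drives the tarfile module directly and rejects oversized sandboxes against MaxTarBallSize; the ML files get the same treatment in a second archive. A trimmed, standalone sketch of the pack-and-check pattern (the entry list and the limit are illustrative, not the actual CRAB file layout):

    import os, tarfile

    def make_sandbox(tgz_path, entries, max_mb=9.5):
        # entries: list of (source_path, name_inside_archive) pairs
        tar = tarfile.open(tgz_path, "w:gz")
        try:
            for src, arcname in entries:
                if os.path.exists(src):
                    tar.add(src, arcname)
        finally:
            tar.close()
        size_mb = os.stat(tgz_path).st_size / 1024.0 / 1024.0
        if size_mb > max_mb:
            raise RuntimeError("Input sandbox of %.1f MB exceeds the %.1f MB limit"
                               % (size_mb, max_mb))
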
# Line 660 | Line 830 | class Cmssw(JobType):
830          txt += 'if [ $middleware == LCG ]; then \n'
831          txt += self.wsSetupCMSLCGEnvironment_()
832          txt += 'elif [ $middleware == OSG ]; then\n'
833 <        txt += '    time=`date -u +"%s"`\n'
834 <        txt += '    WORKING_DIR=$OSG_WN_TMP/cms_$time\n'
665 <        txt += '    echo "Creating working directory: $WORKING_DIR"\n'
666 <        txt += '    /bin/mkdir -p $WORKING_DIR\n'
833 >        txt += '    WORKING_DIR=`/bin/mktemp  -d $OSG_WN_TMP/cms_XXXXXXXXXXXX`\n'
834 >        txt += '    echo "Created working directory: $WORKING_DIR"\n'
835          txt += '    if [ ! -d $WORKING_DIR ] ;then\n'
836          txt += '        echo "SET_CMS_ENV 10016 ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
837 <        txt += '        echo "JOB_EXIT_STATUS = 10016"\n'
838 <        txt += '        echo "JobExitCode=10016" | tee -a $RUNTIME_AREA/$repo\n'
839 <        txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
837 >        txt += '    echo "JOB_EXIT_STATUS = 10016"\n'
838 >        txt += '    echo "JobExitCode=10016" | tee -a $RUNTIME_AREA/$repo\n'
839 >        txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
840          txt += '        rm -f $RUNTIME_AREA/$repo \n'
841          txt += '        echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
842          txt += '        echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
# Line 684 | Line 852 | class Cmssw(JobType):
852          scram = self.scram.commandName()
853          txt += '\n\n'
854          txt += 'echo "### SPECIFIC JOB SETUP ENVIRONMENT ###"\n'
855 +        txt += 'echo "Setting SCRAM_ARCH='+self.executable_arch+'"\n'
856 +        txt += 'export SCRAM_ARCH='+self.executable_arch+'\n'
857          txt += scram+' project CMSSW '+self.version+'\n'
858          txt += 'status=$?\n'
859          txt += 'if [ $status != 0 ] ; then\n'
# Line 700 | Line 870 | class Cmssw(JobType):
870          txt += '        cd $RUNTIME_AREA\n'
871          txt += '        /bin/rm -rf $WORKING_DIR\n'
872          txt += '        if [ -d $WORKING_DIR ] ;then\n'
873 <        txt += '            echo "SET_CMS_ENV 10018 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after CMSSW CMSSW_0_6_1 not found on `hostname`"\n'
874 <        txt += '            echo "JOB_EXIT_STATUS = 10018"\n'
875 <        txt += '            echo "JobExitCode=10018" | tee -a $RUNTIME_AREA/$repo\n'
876 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
873 >        txt += '        echo "SET_CMS_ENV 10018 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after CMSSW CMSSW_0_6_1 not found on `hostname`"\n'
874 >        txt += '        echo "JOB_EXIT_STATUS = 10018"\n'
875 >        txt += '        echo "JobExitCode=10018" | tee -a $RUNTIME_AREA/$repo\n'
876 >        txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
877          txt += '            rm -f $RUNTIME_AREA/$repo \n'
878          txt += '            echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
879          txt += '            echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
# Line 714 | Line 884 | class Cmssw(JobType):
884          txt += 'echo "CMSSW_VERSION =  '+self.version+'"\n'
885          txt += 'cd '+self.version+'\n'
886          ### needed grep for bug in scramv1 ###
887 +        txt += scram+' runtime -sh\n'
888          txt += 'eval `'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME`\n'
889 +        txt += 'echo $PATH\n'
890  
891          # Handle the arguments:
892          txt += "\n"
# Line 736 | Line 908 | class Cmssw(JobType):
908          txt += '        cd $RUNTIME_AREA\n'
909          txt += '        /bin/rm -rf $WORKING_DIR\n'
910          txt += '        if [ -d $WORKING_DIR ] ;then\n'
911 <        txt += '            echo "SET_EXE_ENV 50114 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Too few arguments for CRAB job wrapper"\n'
912 <        txt += '            echo "JOB_EXIT_STATUS = 50114"\n'
913 <        txt += '            echo "JobExitCode=50114" | tee -a $RUNTIME_AREA/$repo\n'
914 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
911 >        txt += '        echo "SET_EXE_ENV 50114 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Too few arguments for CRAB job wrapper"\n'
912 >        txt += '        echo "JOB_EXIT_STATUS = 50114"\n'
913 >        txt += '        echo "JobExitCode=50114" | tee -a $RUNTIME_AREA/$repo\n'
914 >        txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
915          txt += '            rm -f $RUNTIME_AREA/$repo \n'
916          txt += '            echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
917          txt += '            echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
# Line 751 | Line 923 | class Cmssw(JobType):
923  
924          # Prepare job-specific part
925          job = common.job_list[nj]
926 <        pset = os.path.basename(job.configFilename())
927 <        txt += '\n'
928 <        if (self.datasetPath): # standard job
929 <            #txt += 'InputFiles=$2\n'
930 <            txt += 'InputFiles=${args[1]}\n'
931 <            txt += 'echo "Inputfiles:<$InputFiles>"\n'
932 <            txt += 'sed "s#{\'INPUT\'}#$InputFiles#" $RUNTIME_AREA/'+pset+' > pset.cfg\n'
933 <        else:  # pythia like job
934 <            if (self.sourceSeed):
935 <                txt += 'Seed=$2\n'
936 <                txt += 'echo "Seed: <$Seed>"\n'
765 <                txt += 'sed "s#\<INPUT\>#$Seed#" $RUNTIME_AREA/'+pset+' > tmp.cfg\n'
766 <                if (self.sourceSeedVtx):
767 <                    txt += 'VtxSeed=$3\n'
768 <                    txt += 'echo "VtxSeed: <$VtxSeed>"\n'
769 <                    txt += 'sed "s#INPUTVTX#$VtxSeed#" tmp.cfg > pset.cfg\n'
770 <                else:
771 <                    txt += 'mv tmp.cfg pset.cfg\n'
772 <            else:
773 <                txt += '# Copy untouched pset\n'
774 <                txt += 'cp $RUNTIME_AREA/'+pset+' pset.cfg\n'
926 >        ### FEDE FOR DBS OUTPUT PUBLICATION
927 >        if (self.datasetPath):
928 >            txt += '\n'
929 >            txt += 'DatasetPath='+self.datasetPath+'\n'
930 >
931 >            datasetpath_split = self.datasetPath.split("/")
932 >            
933 >            txt += 'PrimaryDataset='+datasetpath_split[1]+'\n'
934 >            txt += 'DataTier='+datasetpath_split[2]+'\n'
935 >            txt += 'ProcessedDataset='+datasetpath_split[3]+'\n'
936 >            txt += 'ApplicationFamily=Online\n'
937  
938 +        else:
939 +            txt += 'DatasetPath=MCDataTier\n'
940 +            txt += 'PrimaryDataset=null\n'
941 +            txt += 'DataTier=null\n'
942 +            txt += 'ProcessedDataset=null\n'
943 +            txt += 'ApplicationFamily=MCDataTier\n'
944 +        ##################    
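The PrimaryDataset/DataTier/ProcessedDataset variables written into the wrapper come straight from splitting the dataset path on '/'. A small sketch of that mapping (helper name and example path are hypothetical):

    def split_dataset_path(dataset_path):
        # expects the /PrimaryDataset/DataTier/ProcessedDataset form used above
        parts = dataset_path.split('/')
        return {'PrimaryDataset': parts[1],
                'DataTier': parts[2],
                'ProcessedDataset': parts[3]}

    # hypothetical example:
    # split_dataset_path('/TTbar/GEN-SIM/CMSSW_1_3_1-Spring07')
    #   -> {'PrimaryDataset': 'TTbar', 'DataTier': 'GEN-SIM',
    #       'ProcessedDataset': 'CMSSW_1_3_1-Spring07'}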
945 +        if self.pset != None: #CarlosDaniele
946 +            pset = os.path.basename(job.configFilename())
947 +            txt += '\n'
948 +            if (self.datasetPath): # standard job
949 +                #txt += 'InputFiles=$2\n'
950 +                txt += 'InputFiles=${args[1]}\n'
951 +                txt += 'MaxEvents=${args[2]}\n'
952 +                txt += 'SkipEvents=${args[3]}\n'
953 +                txt += 'echo "Inputfiles:<$InputFiles>"\n'
954 +                txt += 'sed "s#{\'INPUT\'}#$InputFiles#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
955 +                txt += 'echo "MaxEvents:<$MaxEvents>"\n'
956 +                txt += 'sed "s#INPUTMAXEVENTS#$MaxEvents#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
957 +                txt += 'echo "SkipEvents:<$SkipEvents>"\n'
958 +                txt += 'sed "s#INPUTSKIPEVENTS#$SkipEvents#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
959 +            else:  # pythia like job
960 +                seedIndex=1
961 +                if (self.firstRun):
962 +                    txt += 'FirstRun=${args['+str(seedIndex)+']}\n'
963 +                    txt += 'echo "FirstRun: <$FirstRun>"\n'
964 +                    txt += 'sed "s#\<INPUTFIRSTRUN\>#$FirstRun#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
965 +                    seedIndex=seedIndex+1
966 +
967 +                if (self.sourceSeed):
968 +                    txt += 'Seed=${args['+str(seedIndex)+']}\n'
969 +                    txt += 'sed "s#\<INPUT\>#$Seed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
970 +                    seedIndex=seedIndex+1
971 +                    ## the following seeds are not always present
972 +                    if (self.sourceSeedVtx):
973 +                        txt += 'VtxSeed=${args['+str(seedIndex)+']}\n'
974 +                        txt += 'echo "VtxSeed: <$VtxSeed>"\n'
975 +                        txt += 'sed "s#\<INPUTVTX\>#$VtxSeed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
976 +                        seedIndex += 1
977 +                    if (self.sourceSeedG4):
978 +                        txt += 'G4Seed=${args['+str(seedIndex)+']}\n'
979 +                        txt += 'echo "G4Seed: <$G4Seed>"\n'
980 +                        txt += 'sed "s#\<INPUTG4\>#$G4Seed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
981 +                        seedIndex += 1
982 +                    if (self.sourceSeedMix):
983 +                        txt += 'mixSeed=${args['+str(seedIndex)+']}\n'
984 +                        txt += 'echo "MixSeed: <$mixSeed>"\n'
985 +                        txt += 'sed "s#\<INPUTMIX\>#$mixSeed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
986 +                        seedIndex += 1
987 +                    pass
988 +                pass
989 +            txt += 'mv -f '+pset+' pset.cfg\n'
990  
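The sed commands above rewrite placeholders left in the job's copy of the pset ({'INPUT'}, INPUTMAXEVENTS, INPUTSKIPEVENTS, and the optional seed tokens), and only the final mv renames the result to pset.cfg. A hedged Python sketch of the same substitution for a standard, dataset-driven job (function name is ours):

    def fill_pset_placeholders(pset_text, input_files, max_events, skip_events):
        # same tokens the generated sed commands rewrite for a standard job
        pset_text = pset_text.replace("{'INPUT'}", input_files)
        pset_text = pset_text.replace('INPUTMAXEVENTS', str(max_events))
        pset_text = pset_text.replace('INPUTSKIPEVENTS', str(skip_events))
        return pset_text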
991          if len(self.additional_inbox_files) > 0:
992              for file in self.additional_inbox_files:
# Line 783 | Line 997 | class Cmssw(JobType):
997                  txt += 'fi\n'
998              pass
999  
1000 <        txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
1001 <
1002 <        txt += '\n'
1003 <        txt += 'echo "***** cat pset.cfg *********"\n'
1004 <        txt += 'cat pset.cfg\n'
1005 <        txt += 'echo "****** end pset.cfg ********"\n'
1006 <        txt += '\n'
1007 <        # txt += 'echo "***** cat pset1.cfg *********"\n'
1008 <        # txt += 'cat pset1.cfg\n'
1009 <        # txt += 'echo "****** end pset1.cfg ********"\n'
1000 >        if self.pset != None: #CarlosDaniele
1001 >            txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
1002 >        
1003 >            txt += '\n'
1004 >            txt += 'echo "***** cat pset.cfg *********"\n'
1005 >            txt += 'cat pset.cfg\n'
1006 >            txt += 'echo "****** end pset.cfg ********"\n'
1007 >            txt += '\n'
1008 >            ### FEDE FOR DBS OUTPUT PUBLICATION
1009 >            txt += 'PSETHASH=`EdmConfigHash < pset.cfg`'
1010 >            ##############
1011 >            txt += '\n'
1012 >            # txt += 'echo "***** cat pset1.cfg *********"\n'
1013 >            # txt += 'cat pset1.cfg\n'
1014 >            # txt += 'echo "****** end pset1.cfg ********"\n'
1015          return txt
1016  
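PSETHASH is obtained by piping pset.cfg through the external EdmConfigHash utility and is later handed to the job-report update. A hedged sketch of capturing the same value from Python, assuming EdmConfigHash is on the PATH (this is not how the wrapper itself does it):

    import subprocess

    def pset_hash(pset_path):
        # counterpart of PSETHASH=`EdmConfigHash < pset.cfg` in the wrapper
        pset = open(pset_path)
        try:
            proc = subprocess.Popen(['EdmConfigHash'], stdin=pset,
                                    stdout=subprocess.PIPE)
            out = proc.communicate()[0]
        finally:
            pset.close()
        return out.strip()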
1017 <    def wsBuildExe(self, nj):
1017 >    def wsBuildExe(self, nj=0):
1018          """
1019          Put in the script the commands to build an executable
1020          or a library.
# Line 830 | Line 1049 | class Cmssw(JobType):
1049              txt += 'else \n'
1050              txt += '   echo "Successful untar" \n'
1051              txt += 'fi \n'
1052 +            txt += '\n'
1053 +            txt += 'echo "Include ProdAgentApi and PRODCOMMON in PYTHONPATH"\n'
1054 +            txt += 'if [ -z "$PYTHONPATH" ]; then\n'
1055 +            #### FEDE FOR DBS OUTPUT PUBLICATION
1056 +            txt += '   export PYTHONPATH=`pwd`/ProdAgentApi:`pwd`/ProdCommon\n'
1057 +            #txt += '   export PYTHONPATH=ProdAgentApi\n'
1058 +            txt += 'else\n'
1059 +            txt += '   export PYTHONPATH=`pwd`/ProdAgentApi:`pwd`/ProdCommon:${PYTHONPATH}\n'
1060 +            #txt += '   export PYTHONPATH=ProdAgentApi:${PYTHONPATH}\n'
1061 +            txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
1062 +            ###################  
1063 +            txt += 'fi\n'
1064 +            txt += '\n'
1065 +
1066              pass
1067          
1068          return txt
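The shell fragment above distinguishes an empty PYTHONPATH from a populated one so that ProdAgentApi and ProdCommon are prepended without leaving a stray colon. The same logic in plain Python, for illustration only (function name is ours):

    import os

    def extend_pythonpath(*dirs):
        # prepend the given directories, handling an unset PYTHONPATH cleanly
        extra = ':'.join([os.path.abspath(d) for d in dirs])
        current = os.environ.get('PYTHONPATH', '')
        if current:
            os.environ['PYTHONPATH'] = extra + ':' + current
        else:
            os.environ['PYTHONPATH'] = extra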
# Line 841 | Line 1074 | class Cmssw(JobType):
1074          """
1075          
1076      def executableName(self):
1077 <        return self.executable
1077 >        if self.scriptExe: #CarlosDaniele
1078 >            return "sh "
1079 >        else:
1080 >            return self.executable
1081  
1082      def executableArgs(self):
1083 <        return " -p pset.cfg"
1083 >        if self.scriptExe:#CarlosDaniele
1084 >            return   self.scriptExe + " $NJob"
1085 >        else:
1086 >            return " -p pset.cfg"
1087  
1088      def inputSandbox(self, nj):
1089          """
1090          Returns a list of filenames to be put in JDL input sandbox.
1091          """
1092          inp_box = []
1093 <        # dict added to delete duplicate from input sandbox file list
1094 <        seen = {}
1093 >        # # dict added to delete duplicate from input sandbox file list
1094 >        # seen = {}
1095          ## code
1096          if os.path.isfile(self.tgzNameWithPath):
1097              inp_box.append(self.tgzNameWithPath)
1098 +        if os.path.isfile(self.MLtgzfile):
1099 +            inp_box.append(self.MLtgzfile)
1100          ## config
1101 <        inp_box.append(common.job_list[nj].configFilename())
1101 >        if not self.pset is None:
1102 >            inp_box.append(common.work_space.pathForTgz() + 'job/' + self.configFilename())
1103          ## additional input files
1104 <        #for file in self.additional_inbox_files:
1105 <        #    inp_box.append(common.work_space.cwdDir()+file)
1104 >        for file in self.additional_inbox_files:
1105 >            inp_box.append(file)
1106          return inp_box
1107  
1108      def outputSandbox(self, nj):
# Line 869 | Line 1111 | class Cmssw(JobType):
1111          """
1112          out_box = []
1113  
872        stdout=common.job_list[nj].stdout()
873        stderr=common.job_list[nj].stderr()
874
1114          ## User Declared output files
1115 <        for out in self.output_file:
1115 >        for out in (self.output_file+self.output_file_sandbox):
1116              n_out = nj + 1
1117              out_box.append(self.numberFile_(out,str(n_out)))
1118          return out_box
880        return []
1119  
1120      def prepareSteeringCards(self):
1121          """
# Line 893 | Line 1131 | class Cmssw(JobType):
1131          txt = '\n'
1132          txt += '# directory content\n'
1133          txt += 'ls \n'
1134 <        file_list = ''
1135 <        for fileWithSuffix in self.output_file:
1134 >
1135 >        for fileWithSuffix in (self.output_file+self.output_file_sandbox):
1136              output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
899            file_list=file_list+output_file_num+' '
1137              txt += '\n'
1138              txt += '# check output file\n'
1139              txt += 'ls '+fileWithSuffix+'\n'
1140              txt += 'ls_result=$?\n'
904            #txt += 'exe_result=$?\n'
1141              txt += 'if [ $ls_result -ne 0 ] ; then\n'
1142 +            txt += '   exit_status=60302\n'
1143              txt += '   echo "ERROR: Problem with output file"\n'
907            #txt += '   echo "JOB_EXIT_STATUS = $exe_result"\n'
908            #txt += '   echo "JobExitCode=60302" | tee -a $RUNTIME_AREA/$repo\n'
909            #txt += '   dumpStatus $RUNTIME_AREA/$repo\n'
910            ### OLI_DANIELE
1144              if common.scheduler.boss_scheduler_name == 'condor_g':
1145                  txt += '    if [ $middleware == OSG ]; then \n'
1146                  txt += '        echo "prepare dummy output file"\n'
1147                  txt += '        echo "Processing of job output failed" > $RUNTIME_AREA/'+output_file_num+'\n'
1148                  txt += '    fi \n'
1149              txt += 'else\n'
1150 <            txt += '   cp '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1150 >            ### FEDE FOR DBS OUTPUT PUBLICATION
1151 >            txt += '   mv '+fileWithSuffix+' $RUNTIME_AREA\n'
1152 >            txt += '   cp $RUNTIME_AREA/'+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1153 >            #################################
1154              txt += 'fi\n'
1155        
1156          txt += 'cd $RUNTIME_AREA\n'
921        file_list=file_list[:-1]
922        txt += 'file_list="'+file_list+'"\n'
923        txt += 'cd $RUNTIME_AREA\n'
1157          ### OLI_DANIELE
1158          txt += 'if [ $middleware == OSG ]; then\n'  
1159          txt += '    cd $RUNTIME_AREA\n'
1160          txt += '    echo "Remove working directory: $WORKING_DIR"\n'
1161          txt += '    /bin/rm -rf $WORKING_DIR\n'
1162          txt += '    if [ -d $WORKING_DIR ] ;then\n'
1163 <        txt += '        echo "SET_EXE 60999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after cleanup of WN"\n'
1164 <        txt += '        echo "JOB_EXIT_STATUS = 60999"\n'
1165 <        txt += '        echo "JobExitCode=60999" | tee -a $RUNTIME_AREA/$repo\n'
1166 <        txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
1163 >        txt += '    echo "SET_EXE 60999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after cleanup of WN"\n'
1164 >        txt += '    echo "JOB_EXIT_STATUS = 60999"\n'
1165 >        txt += '    echo "JobExitCode=60999" | tee -a $RUNTIME_AREA/$repo\n'
1166 >        txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
1167          txt += '        rm -f $RUNTIME_AREA/$repo \n'
1168          txt += '        echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1169          txt += '        echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1170          txt += '    fi\n'
1171          txt += 'fi\n'
1172          txt += '\n'
1173 +
1174 +        file_list = ''
1175 +        ## Add to filelist only files to be possibly copied to SE
1176 +        for fileWithSuffix in self.output_file:
1177 +            output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
1178 +            file_list=file_list+output_file_num+' '
1179 +        file_list=file_list[:-1]
1180 +        txt += 'file_list="'+file_list+'"\n'
1181 +
1182          return txt
1183  
1184      def numberFile_(self, file, txt):
# Line 947 | Line 1189 | class Cmssw(JobType):
1189          # take away last extension
1190          name = p[0]
1191          for x in p[1:-1]:
1192 <           name=name+"."+x
1192 >            name=name+"."+x
1193          # add "_txt"
1194          if len(p)>1:
1195 <          ext = p[len(p)-1]
1196 <          #result = name + '_' + str(txt) + "." + ext
955 <          result = name + '_' + txt + "." + ext
1195 >            ext = p[len(p)-1]
1196 >            result = name + '_' + txt + "." + ext
1197          else:
1198 <          #result = name + '_' + str(txt)
958 <          result = name + '_' + txt
1198 >            result = name + '_' + txt
1199          
1200          return result
1201  
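numberFile_ tags every output file with the job number just before the last extension, so histo.root for job 3 becomes histo_3.root, while an extensionless name simply gets the suffix appended. A standalone sketch of the same rule:

    def number_file(filename, tag):
        # equivalent of numberFile_: insert '_<tag>' before the last extension
        parts = filename.split('.')
        if len(parts) > 1:
            return '.'.join(parts[:-1]) + '_' + tag + '.' + parts[-1]
        return filename + '_' + tag

    # number_file('histo.root', '3')   -> 'histo_3.root'
    # number_file('output', '$NJob')   -> 'output_$NJob'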
1202 <    def getRequirements(self):
1202 >    def getRequirements(self, nj=[]):
1203          """
1204          return job requirements to add to jdl files
1205          """
1206          req = ''
1207 <        if common.analisys_common_info['sw_version']:
1207 >        if self.version:
1208              req='Member("VO-cms-' + \
1209 <                 common.analisys_common_info['sw_version'] + \
1209 >                 self.version + \
1210                   '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1211 <        if common.analisys_common_info['sites']:
1212 <            if len(common.analisys_common_info['sites'])>0:
1213 <                req = req + ' && ('
1214 <                for i in range(len(common.analisys_common_info['sites'])):
1215 <                    req = req + 'other.GlueCEInfoHostName == "' \
1216 <                         + common.analisys_common_info['sites'][i] + '"'
1217 <                    if ( i < (int(len(common.analisys_common_info['sites']) - 1)) ):
978 <                        req = req + ' || '
979 <            req = req + ')'
980 <        #print "req = ", req
1211 >        # if self.executable_arch:
1212 >        #     req='Member("VO-cms-' + \
1213 >        #          self.executable_arch + \
1214 >        #          '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1215 >
1216 >        req = req + ' && (other.GlueHostNetworkAdapterOutboundIP)'
1217 >
1218          return req
1219  
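With this change getRequirements() builds the JDL expression from the CMSSW release tag plus an outbound-IP constraint instead of the old per-site list. A short sketch of the resulting string for the usual case where the release is known (function name is hypothetical):

    def build_requirements(cmssw_version):
        # mirrors getRequirements() when self.version is set
        req = ('Member("VO-cms-' + cmssw_version +
               '", other.GlueHostApplicationSoftwareRunTimeEnvironment)')
        req += ' && (other.GlueHostNetworkAdapterOutboundIP)'
        return req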
1220      def configFilename(self):
# Line 994 | Line 1231 | class Cmssw(JobType):
1231          txt += '   echo "### SETUP CMS OSG  ENVIRONMENT ###"\n'
1232          txt += '   if [ -f $GRID3_APP_DIR/cmssoft/cmsset_default.sh ] ;then\n'
1233          txt += '      # Use $GRID3_APP_DIR/cmssoft/cmsset_default.sh to setup cms software\n'
1234 +        txt += '       export SCRAM_ARCH='+self.executable_arch+'\n'
1235          txt += '       source $GRID3_APP_DIR/cmssoft/cmsset_default.sh '+self.version+'\n'
1236 <        txt += '   elif [ -f $OSG_APP/cmssoft/cmsset_default.sh ] ;then\n'
1237 <        txt += '      # Use $OSG_APP/cmssoft/cmsset_default.sh to setup cms software\n'
1238 <        txt += '       source $OSG_APP/cmssoft/cmsset_default.sh '+self.version+'\n'
1236 >        txt += '   elif [ -f $OSG_APP/cmssoft/cms/cmsset_default.sh ] ;then\n'
1237 >        txt += '      # Use $OSG_APP/cmssoft/cms/cmsset_default.sh to setup cms software\n'
1238 >        txt += '       export SCRAM_ARCH='+self.executable_arch+'\n'
1239 >        txt += '       source $OSG_APP/cmssoft/cms/cmsset_default.sh '+self.version+'\n'
1240          txt += '   else\n'
1241 <        txt += '       echo "SET_CMS_ENV 10020 ==> ERROR $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cmsset_default.sh file not found"\n'
1241 >        txt += '       echo "SET_CMS_ENV 10020 ==> ERROR $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1242          txt += '       echo "JOB_EXIT_STATUS = 10020"\n'
1243          txt += '       echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1244          txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
# Line 1012 | Line 1251 | class Cmssw(JobType):
1251          txt += '       cd $RUNTIME_AREA\n'
1252          txt += '       /bin/rm -rf $WORKING_DIR\n'
1253          txt += '       if [ -d $WORKING_DIR ] ;then\n'
1254 <        txt += '            echo "SET_CMS_ENV 10017 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cmsset_default.sh file not found"\n'
1255 <        txt += '            echo "JOB_EXIT_STATUS = 10017"\n'
1256 <        txt += '            echo "JobExitCode=10017" | tee -a $RUNTIME_AREA/$repo\n'
1257 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
1254 >        txt += '        echo "SET_CMS_ENV 10017 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1255 >        txt += '        echo "JOB_EXIT_STATUS = 10017"\n'
1256 >        txt += '        echo "JobExitCode=10017" | tee -a $RUNTIME_AREA/$repo\n'
1257 >        txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
1258          txt += '            rm -f $RUNTIME_AREA/$repo \n'
1259          txt += '            echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1260          txt += '            echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
# Line 1073 | Line 1312 | class Cmssw(JobType):
1312          txt += '       fi\n'
1313          txt += '   fi\n'
1314          txt += '   \n'
1076        txt += '   string=`cat /etc/redhat-release`\n'
1077        txt += '   echo $string\n'
1078        txt += '   if [[ $string = *alhalla* ]]; then\n'
1079        txt += '       echo "SCRAM_ARCH= $SCRAM_ARCH"\n'
1080        txt += '   elif [[ $string = *Enterprise* ]] || [[ $string = *cientific* ]]; then\n'
1081        txt += '       export SCRAM_ARCH=slc3_ia32_gcc323\n'
1082        txt += '       echo "SCRAM_ARCH= $SCRAM_ARCH"\n'
1083        txt += '   else\n'
1084        txt += '       echo "SET_CMS_ENV 10033 ==> ERROR OS unknown, LCG environment not initialized"\n'
1085        txt += '       echo "JOB_EXIT_STATUS = 10033"\n'
1086        txt += '       echo "JobExitCode=10033" | tee -a $RUNTIME_AREA/$repo\n'
1087        txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
1088        txt += '       rm -f $RUNTIME_AREA/$repo \n'
1089        txt += '       echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1090        txt += '       echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1091        txt += '       exit 1\n'
1092        txt += '   fi\n'
1315          txt += '   echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
1316          txt += '   echo "### END SETUP CMS LCG ENVIRONMENT ###"\n'
1317          return txt
1318  
1319 +    ### FEDE FOR DBS OUTPUT PUBLICATION
1320 +    def modifyReport(self, nj):
1321 +        """
1322 +        insert the part of the script that modifies the FrameworkJob Report
1323 +        """
1324 +        
1325 +        txt = ''
1326 +        txt += 'echo "Modify Job Report" \n'
1327 +        txt += 'chmod a+x $RUNTIME_AREA/'+self.version+'/ProdAgentApi/FwkJobRep/ModifyJobReport.py\n'
1328 +        txt += 'if [ -z "$SE" ]; then\n'
1329 +        txt += '    SE="" \n'
1330 +        txt += 'fi \n'
1331 +        txt += 'if [ -z "$SE_PATH" ]; then\n'
1332 +        txt += '    SE_PATH="" \n'
1333 +        txt += 'fi \n'
1334 +        txt += 'echo "SE = $SE"\n'
1335 +        txt += 'echo "SE_PATH = $SE_PATH"\n'
1336 +        txt += 'FOR_LFN=$DatasetPath/$MonitorID\n'
1337 +        txt += 'echo "FOR_LFN = $FOR_LFN"\n'
1338 +        txt += 'echo "CMSSW_VERSION = $CMSSW_VERSION"\n'
1339 +        txt += 'echo "$RUNTIME_AREA/'+self.version+'/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
1340 +        txt += '$RUNTIME_AREA/'+self.version+'/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
1341 +        txt += 'modifyReport_result=$?\n'
1342 +        txt += 'echo modifyReport_result = $modifyReport_result\n'
1343 +        txt += 'if [ $modifyReport_result -ne 0 ]; then\n'
1344 +        txt += '    exit_status=1\n'
1345 +        txt += '    echo "ERROR: Problem with ModifyJobReport"\n'
1346 +        txt += 'else\n'
1347 +        txt += '    mv NewFrameworkJobReport.xml crab_fjr_$NJob.xml\n'
1348 +        txt += 'fi\n'
1349 +        return txt
1350 +    #################
1351 +
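modifyReport() emits a shell block that runs the ProdAgentApi ModifyJobReport.py script with a fixed argument order. As a readability aid only, here is a hedged Python sketch that assembles the same command line (argument order copied from the invocation above; the helper name is ours):

    import os

    def modify_report_command(runtime_area, version, njob, for_lfn, primary, tier,
                              processed, app_family, executable, pset_hash,
                              se, se_path):
        # same argument order as the generated shell invocation above
        script = os.path.join(runtime_area, version,
                              'ProdAgentApi', 'FwkJobRep', 'ModifyJobReport.py')
        args = ['crab_fjr_%s.xml' % njob, str(njob), for_lfn, primary, tier,
                processed, app_family, executable, version, pset_hash, se, se_path]
        return script + ' ' + ' '.join(args)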
1352      def setParam_(self, param, value):
1353          self._params[param] = value
1354  
# Line 1105 | Line 1360 | class Cmssw(JobType):
1360          
1361      def getTaskid(self):
1362          return self._taskId
1363 +
1364 + #######################################################################
1365 +    def uniquelist(self, old):
1366 +        """
1367 +        remove duplicates from a list
1368 +        """
1369 +        nd={}
1370 +        for e in old:
1371 +            nd[e]=0
1372 +        return nd.keys()
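uniquelist() deduplicates by loading the entries into a dictionary and returning its keys, which means the original ordering is not preserved. A short usage sketch with made-up file names:

    # illustrative input only
    sandbox = ['default.tgz', 'extra.txt', 'default.tgz']
    nd = {}
    for e in sandbox:
        nd[e] = 0
    unique = nd.keys()   # duplicates removed, order not guaranteed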

Diff Legend

  Removed lines (present only in revision 1.33)
+ Added lines (present only in revision 1.93)
< Changed lines (revision 1.33 side)
> Changed lines (revision 1.93 side)