root/cvsroot/COMP/CRAB/python/cms_cmssw.py

Comparing COMP/CRAB/python/cms_cmssw.py (file contents):
Revision 1.23 by slacapra, Wed Jul 5 14:10:24 2006 UTC vs.
Revision 1.107 by gutsche, Tue Jul 24 19:49:35 2007 UTC

# Line 2 | Line 2 | from JobType import JobType
2   from crab_logger import Logger
3   from crab_exceptions import *
4   from crab_util import *
5 import math
5   import common
7 import PsetManipulator  
8
9 import DBSInfo_EDM
10 import DataDiscovery_EDM
11 import DataLocation_EDM
6   import Scram
7  
8 < import os, string, re
8 > import os, string, glob
9  
10   class Cmssw(JobType):
11 <    def __init__(self, cfg_params):
11 >    def __init__(self, cfg_params, ncjobs):
12          JobType.__init__(self, 'CMSSW')
13          common.logger.debug(3,'CMSSW::__init__')
14  
21        self.analisys_common_info = {}
22        # Marco.
15          self._params = {}
16          self.cfg_params = cfg_params
17 +
18 +        try:
19 +            self.MaxTarBallSize = float(self.cfg_params['EDG.maxtarballsize'])
20 +        except KeyError:
21 +            self.MaxTarBallSize = 9.5
22 +
23 +        # number of jobs requested to be created, limits job splitting
24 +        self.ncjobs = ncjobs
25 +
26          log = common.logger
27          
28          self.scram = Scram.Scram(cfg_params)
28        scramArea = ''
29          self.additional_inbox_files = []
30          self.scriptExe = ''
31          self.executable = ''
32 +        self.executable_arch = self.scram.getArch()
33          self.tgz_name = 'default.tgz'
34 +        self.additional_tgz_name = 'additional.tgz'
35 +        self.scriptName = 'CMSSW.sh'
36 +        self.pset = ''      #script use case Da
37 +        self.datasetPath = '' #script use case Da
38  
39 +        # set FJR file name
40 +        self.fjrFileName = 'crab_fjr.xml'
41  
42          self.version = self.scram.getSWVersion()
43 +        common.taskDB.setDict('codeVersion',self.version)
44          self.setParam_('application', self.version)
37        common.analisys_common_info['sw_version'] = self.version
38        ### FEDE
39        common.analisys_common_info['copy_input_data'] = 0
40        common.analisys_common_info['events_management'] = 1
45  
46          ### collect Data cards
47 +
48 +        ## get DBS mode
49 +        try:
50 +            self.use_dbs_1 = int(self.cfg_params['CMSSW.use_dbs_1'])
51 +        except KeyError:
52 +            self.use_dbs_1 = 0
53 +            
54          try:
55              tmp =  cfg_params['CMSSW.datasetpath']
56              log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
# Line 59 | Line 70 | class Cmssw(JobType):
70              self.setParam_('dataset', 'None')
71              self.setParam_('owner', 'None')
72          else:
73 <            datasetpath_split = self.datasetPath.split("/")
74 <            self.setParam_('dataset', datasetpath_split[1])
75 <            self.setParam_('owner', datasetpath_split[-1])
76 <
73 >            try:
74 >                datasetpath_split = self.datasetPath.split("/")
75 >                # standard style
76 >                if self.use_dbs_1 == 1 :
77 >                    self.setParam_('dataset', datasetpath_split[1])
78 >                    self.setParam_('owner', datasetpath_split[-1])
79 >                else:
80 >                    self.setParam_('dataset', datasetpath_split[1])
81 >                    self.setParam_('owner', datasetpath_split[2])
82 >            except:
83 >                self.setParam_('dataset', self.datasetPath)
84 >                self.setParam_('owner', self.datasetPath)
85 >                
86          self.setTaskid_()
87          self.setParam_('taskId', self.cfg_params['taskId'])
88  
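The dataset and owner parameters above are taken from different components of the dataset path depending on whether DBS-1 or DBS-2 conventions are in use. A minimal standalone sketch of that index logic, using made-up dataset paths:

# DBS-2 style path (hypothetical): /PrimaryDataset/ProcessedDataset/DataTier
dbs2_path = "/MyPrimary/MyProcessed/GEN-SIM"
parts = dbs2_path.split("/")          # ['', 'MyPrimary', 'MyProcessed', 'GEN-SIM']
dataset, owner = parts[1], parts[2]   # DBS-2 branch: second and third components

# DBS-1 style path (hypothetical) keeps the owner as the last component
dbs1_path = "/MyPrimary/GEN-SIM/MyOwner"
parts = dbs1_path.split("/")
dataset, owner = parts[1], parts[-1]  # DBS-1 branch: second and last components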
# Line 85 | Line 105 | class Cmssw(JobType):
105          try:
106              self.pset = cfg_params['CMSSW.pset']
107              log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
108 <            if (not os.path.exists(self.pset)):
109 <                raise CrabException("User defined PSet file "+self.pset+" does not exist")
108 >            if self.pset.lower() != 'none' :
109 >                if (not os.path.exists(self.pset)):
110 >                    raise CrabException("User defined PSet file "+self.pset+" does not exist")
111 >            else:
112 >                self.pset = None
113          except KeyError:
114              raise CrabException("PSet file missing. Cannot run cmsRun ")
115  
116          # output files
117 +        ## stuff which must be returned always via sandbox
118 +        self.output_file_sandbox = []
119 +
120 +        # add fjr report by default via sandbox
121 +        self.output_file_sandbox.append(self.fjrFileName)
122 +
123 +        # other output files to be returned via sandbox or copied to SE
124          try:
125              self.output_file = []
96
126              tmp = cfg_params['CMSSW.output_file']
127              if tmp != '':
128                  tmpOutFiles = string.split(cfg_params['CMSSW.output_file'],',')
# Line 103 | Line 132 | class Cmssw(JobType):
132                      self.output_file.append(tmp)
133                      pass
134              else:
135 <                log.message("No output file defined: only stdout/err will be available")
135 >                log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
136                  pass
137              pass
138          except KeyError:
139 <            log.message("No output file defined: only stdout/err will be available")
139 >            log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available\n")
140              pass
141  
142          # script_exe file as additional file in inputSandbox
143          try:
144              self.scriptExe = cfg_params['USER.script_exe']
116            self.additional_inbox_files.append(self.scriptExe)
145              if self.scriptExe != '':
146                 if not os.path.isfile(self.scriptExe):
147 <                  msg ="WARNING. file "+self.scriptExe+" not found"
147 >                  msg ="ERROR. file "+self.scriptExe+" not found"
148                    raise CrabException(msg)
149 +               self.additional_inbox_files.append(string.strip(self.scriptExe))
150          except KeyError:
151 <           pass
152 <                  
151 >            self.scriptExe = ''
152 >
153 >        #CarlosDaniele
154 >        if self.datasetPath == None and self.pset == None and self.scriptExe == '' :
155 +           msg ="Error. script_exe not defined"
156 >           raise CrabException(msg)
157 >
158          ## additional input files
159          try:
160 <            tmpAddFiles = string.split(cfg_params['CMSSW.additional_input_files'],',')
160 >            tmpAddFiles = string.split(cfg_params['USER.additional_input_files'],',')
161              for tmp in tmpAddFiles:
162 <                if not os.path.exists(tmp):
163 <                    raise CrabException("Additional input file not found: "+tmp)
164 <                tmp=string.strip(tmp)
165 <                self.additional_inbox_files.append(tmp)
162 >                tmp = string.strip(tmp)
163 >                dirname = ''
164 >                if not tmp[0]=="/": dirname = "."
165 >                files = []
166 >                if string.find(tmp,"*")>-1:
167 >                    files = glob.glob(os.path.join(dirname, tmp))
168 >                    if len(files)==0:
169 >                        raise CrabException("No additional input file found with this pattern: "+tmp)
170 >                else:
171 >                    files.append(tmp)
172 >                for file in files:
173 >                    if not os.path.exists(file):
174 >                        raise CrabException("Additional input file not found: "+file)
175 >                    pass
176 >                    # fname = string.split(file, '/')[-1]
177 >                    # storedFile = common.work_space.pathForTgz()+'share/'+fname
178 >                    # shutil.copyfile(file, storedFile)
179 >                    self.additional_inbox_files.append(string.strip(file))
180                  pass
181              pass
182 +            common.logger.debug(5,"Additional input files: "+str(self.additional_inbox_files))
183          except KeyError:
184              pass
185  
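The loop above turns the comma-separated USER.additional_input_files value into a flat file list, expanding any '*' pattern with glob and failing if a file is missing. A minimal sketch of the same resolution, assuming a hypothetical config value:

import glob, os

# hypothetical value of USER.additional_input_files
additional = "my_calib.db, data/*.txt"

resolved = []
for entry in additional.split(","):
    entry = entry.strip()
    if "*" in entry:
        # wildcard: expand relative to the current directory
        matches = glob.glob(entry)
        if not matches:
            raise RuntimeError("No additional input file found with this pattern: " + entry)
    else:
        matches = [entry]
    for name in matches:
        if not os.path.exists(name):
            raise RuntimeError("Additional input file not found: " + name)
        resolved.append(name)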
186          # files per job
187          try:
188 <            self.filesPerJob = int(cfg_params['CMSSW.files_per_jobs']) #Daniele
189 <            self.selectFilesPerJob = 1
188 >            if (cfg_params['CMSSW.files_per_jobs']):
189 >                raise CrabException("files_per_jobs no longer supported.  Quitting.")
190          except KeyError:
191 <            self.filesPerJob = 0
143 <            self.selectFilesPerJob = 0
191 >            pass
192  
193          ## Events per job
194          try:
# Line 158 | Line 206 | class Cmssw(JobType):
206              self.theNumberOfJobs = 0
207              self.selectNumberOfJobs = 0
208  
209 +        try:
210 +            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
211 +            self.selectTotalNumberEvents = 1
212 +        except KeyError:
213 +            self.total_number_of_events = 0
214 +            self.selectTotalNumberEvents = 0
215 +
216 +        if self.pset != None: #CarlosDaniele
217 +             if ( (self.selectTotalNumberEvents + self.selectEventsPerJob + self.selectNumberOfJobs) != 2 ):
218 +                 msg = 'Must define exactly two of total_number_of_events, events_per_job, or number_of_jobs.'
219 +                 raise CrabException(msg)
220 +        else:
221 +             if (self.selectNumberOfJobs == 0):
222 +                 msg = 'Must specify number_of_jobs.'
223 +                 raise CrabException(msg)
224 +
225          ## source seed for pythia
226          try:
227              self.sourceSeed = int(cfg_params['CMSSW.pythia_seed'])
# Line 165 | Line 229 | class Cmssw(JobType):
229              self.sourceSeed = None
230              common.logger.debug(5,"No seed given")
231  
168        if not (self.selectFilesPerJob + self.selectEventsPerJob + self.selectNumberOfJobs == 1 ):
169            msg = 'Must define either files_per_jobs or events_per_job or number_of_jobs'
170            raise CrabException(msg)
171
232          try:
233 <            self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
233 >            self.sourceSeedVtx = int(cfg_params['CMSSW.vtx_seed'])
234          except KeyError:
235 <            msg = 'Must define total_number_of_events'
236 <            raise CrabException(msg)
237 <        
178 <        CEBlackList = []
235 >            self.sourceSeedVtx = None
236 >            common.logger.debug(5,"No vertex seed given")
237 >
238          try:
239 <            tmpBad = string.split(cfg_params['EDG.ce_black_list'],',')
181 <            for tmp in tmpBad:
182 <                tmp=string.strip(tmp)
183 <                CEBlackList.append(tmp)
239 >            self.sourceSeedG4 = int(cfg_params['CMSSW.g4_seed'])
240          except KeyError:
241 <            pass
241 >            self.sourceSeedG4 = None
242 >            common.logger.debug(5,"No g4 sim hits seed given")
243  
244 <        self.reCEBlackList=[]
245 <        for bad in CEBlackList:
189 <            self.reCEBlackList.append(re.compile( bad ))
190 <
191 <        common.logger.debug(5,'CEBlackList: '+str(CEBlackList))
192 <
193 <        CEWhiteList = []
194 <        try:
195 <            tmpGood = string.split(cfg_params['EDG.ce_white_list'],',')
196 <            for tmp in tmpGood:
197 <                tmp=string.strip(tmp)
198 <                CEWhiteList.append(tmp)
244 >        try:
245 >            self.sourceSeedMix = int(cfg_params['CMSSW.mix_seed'])
246          except KeyError:
247 <            pass
248 <
202 <        #print 'CEWhiteList: ',CEWhiteList
203 <        self.reCEWhiteList=[]
204 <        for Good in CEWhiteList:
205 <            self.reCEWhiteList.append(re.compile( Good ))
247 >            self.sourceSeedMix = None
248 >            common.logger.debug(5,"No mix seed given")
249  
250 <        common.logger.debug(5,'CEWhiteList: '+str(CEWhiteList))
251 <
252 <        self.PsetEdit = PsetManipulator.PsetManipulator(self.pset) #Daniele Pset
250 >        try:
251 >            self.firstRun = int(cfg_params['CMSSW.first_run'])
252 >        except KeyError:
253 >            self.firstRun = None
254 >            common.logger.debug(5,"No first run given")
255 >        if self.pset != None: #CarlosDaniele
256 >            ver = string.split(self.version,"_")
257 >            if (int(ver[1])>=1 and int(ver[2])>=5):
258 >                import PsetManipulator150 as pp
259 >            else:
260 >                import PsetManipulator as pp
261 >            PsetEdit = pp.PsetManipulator(self.pset) #Daniele Pset
262  
263          #DBSDLS-start
264          ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
265          self.maxEvents=0  # max events available   ( --> check the requested nb. of evts in Creator.py)
266          self.DBSPaths={}  # all dbs paths requested ( --> input to the site local discovery script)
267 +        self.jobDestination=[]  # Site destination(s) for each job (list of lists)
268          ## Perform the data location and discovery (based on DBS/DLS)
269          ## SL: Don't if NONE is specified as input (pythia use case)
270 <        common.analisys_common_info['sites']=None
270 >        blockSites = {}
271          if self.datasetPath:
272 <            self.DataDiscoveryAndLocation(cfg_params)
272 >            blockSites = self.DataDiscoveryAndLocation(cfg_params)
273          #DBSDLS-end          
274  
275          self.tgzNameWithPath = self.getTarBall(self.executable)
276      
277          ## Select Splitting
278 <        if self.selectNoInput: self.jobSplittingNoInput()
279 <        elif self.selectFilesPerJob or self.selectEventsPerJob or self.selectNumberOfJobs: self.jobSplittingPerFiles()
278 >        if self.selectNoInput:
279 >            if self.pset == None: #CarlosDaniele
280 >                self.jobSplittingForScript()
281 >            else:
282 >                self.jobSplittingNoInput()
283          else:
284 <            msg = 'Don\'t know how to split...'
229 <            raise CrabException(msg)
284 >            self.jobSplittingByBlocks(blockSites)
285  
286          # modify Pset
287 <        try:
288 <            if (self.datasetPath): # standard job
289 <                #self.PsetEdit.maxEvent(self.eventsPerJob)
290 <                # always process all events in a file
291 <                self.PsetEdit.maxEvent("-1")
292 <                self.PsetEdit.inputModule("INPUT")
293 <
294 <            else:  # pythia like job
295 <                self.PsetEdit.maxEvent(self.eventsPerJob)
296 <                if (self.sourceSeed) :
297 <                    self.PsetEdit.pythiaSeed("INPUT")
298 <        
299 <            self.PsetEdit.psetWriter(self.configFilename())
300 <        except:
301 <            msg='Error while manipuliating ParameterSet: exiting...'
302 <            raise CrabException(msg)
287 >        if self.pset != None: #CarlosDaniele
288 >            try:
289 >                if (self.datasetPath): # standard job
290 >                    # allow processing a fraction of events in a file
291 >                    PsetEdit.inputModule("INPUT")
292 >                    PsetEdit.maxEvent("INPUTMAXEVENTS")
293 >                    PsetEdit.skipEvent("INPUTSKIPEVENTS")
294 >                else:  # pythia like job
295 >                    PsetEdit.maxEvent(self.eventsPerJob)
296 >                    if (self.firstRun):
297 >                        PsetEdit.pythiaFirstRun("INPUTFIRSTRUN")  #First Run
298 >                    if (self.sourceSeed) :
299 >                        PsetEdit.pythiaSeed("INPUT")
300 >                        if (self.sourceSeedVtx) :
301 >                            PsetEdit.vtxSeed("INPUTVTX")
302 >                        if (self.sourceSeedG4) :
303 >                            self.PsetEdit.g4Seed("INPUTG4")
304 >                        if (self.sourceSeedMix) :
305 >                            self.PsetEdit.mixSeed("INPUTMIX")
306 >                # add FrameworkJobReport to parameter-set
307 >                PsetEdit.addCrabFJR(self.fjrFileName)
308 >                PsetEdit.psetWriter(self.configFilename())
309 >            except:
310 >                msg='Error while manipulating ParameterSet: exiting...'
311 >                raise CrabException(msg)
312  
313      def DataDiscoveryAndLocation(self, cfg_params):
314  
315 +        import DataDiscovery
316 +        import DataDiscovery_DBS2
317 +        import DataLocation
318          common.logger.debug(10,"CMSSW::DataDiscoveryAndLocation()")
319  
320          datasetPath=self.datasetPath
321  
255        ## TODO
256        dataTiersList = ""
257        dataTiers = dataTiersList.split(',')
258
322          ## Contact the DBS
323 +        common.logger.message("Contacting Data Discovery Services ...")
324          try:
325 <            self.pubdata=DataDiscovery_EDM.DataDiscovery_EDM(datasetPath, dataTiers, cfg_params)
325 >
326 >            if self.use_dbs_1 == 1 :
327 >                self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params)
328 >            else :
329 >                self.pubdata=DataDiscovery_DBS2.DataDiscovery_DBS2(datasetPath, cfg_params)
330              self.pubdata.fetchDBSInfo()
331  
332 <        except DataDiscovery_EDM.NotExistingDatasetError, ex :
332 >        except DataDiscovery.NotExistingDatasetError, ex :
333              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
334              raise CrabException(msg)
335 <
336 <        except DataDiscovery_EDM.NoDataTierinProvenanceError, ex :
335 >        except DataDiscovery.NoDataTierinProvenanceError, ex :
336 >            msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
337 >            raise CrabException(msg)
338 >        except DataDiscovery.DataDiscoveryError, ex:
339 >            msg = 'ERROR ***: failed Data Discovery in DBS :  %s'%ex.getErrorMessage()
340 >            raise CrabException(msg)
341 >        except DataDiscovery_DBS2.NotExistingDatasetError_DBS2, ex :
342 >            msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
343 >            raise CrabException(msg)
344 >        except DataDiscovery_DBS2.NoDataTierinProvenanceError_DBS2, ex :
345              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
346              raise CrabException(msg)
347 <        except DataDiscovery_EDM.DataDiscoveryError, ex:
348 <            msg = 'ERROR ***: failed Data Discovery in DBS  %s'%ex.getErrorMessage()
347 >        except DataDiscovery_DBS2.DataDiscoveryError_DBS2, ex:
348 >            msg = 'ERROR ***: failed Data Discovery in DBS :  %s'%ex.getErrorMessage()
349              raise CrabException(msg)
350  
351 <        ## get list of all required data in the form of dbs paths  (dbs path = /dataset/datatier/owner)
352 <        ## self.DBSPaths=self.pubdata.getDBSPaths()
353 <        common.logger.message("Required data are :"+self.datasetPath)
278 <
279 <        filesbyblock=self.pubdata.getFiles()
280 < #        print filesbyblock
281 <        self.AllInputFiles=filesbyblock.values()
282 <        self.files = self.AllInputFiles        
351 >        self.filesbyblock=self.pubdata.getFiles()
352 >        self.eventsbyblock=self.pubdata.getEventsPerBlock()
353 >        self.eventsbyfile=self.pubdata.getEventsPerFile()
354  
355          ## get max number of events
285        #common.logger.debug(10,"number of events for primary fileblocks %i"%self.pubdata.getMaxEvents())
356          self.maxEvents=self.pubdata.getMaxEvents() ##  self.maxEvents used in Creator.py
287        common.logger.message("\nThe number of available events is %s"%self.maxEvents)
357  
358          ## Contact the DLS and build a list of sites hosting the fileblocks
359          try:
360 <            dataloc=DataLocation_EDM.DataLocation_EDM(filesbyblock.keys(),cfg_params)
360 >            dataloc=DataLocation.DataLocation(self.filesbyblock.keys(),cfg_params)
361              dataloc.fetchDLSInfo()
362 <        except DataLocation_EDM.DataLocationError , ex:
362 >        except DataLocation.DataLocationError , ex:
363              msg = 'ERROR ***: failed Data Location in DLS \n %s '%ex.getErrorMessage()
364              raise CrabException(msg)
365          
297        allsites=dataloc.getSites()
298        common.logger.debug(5,"sites are %s"%allsites)
299        sites=self.checkBlackList(allsites)
300        common.logger.debug(5,"sites are (after black list) %s"%sites)
301        sites=self.checkWhiteList(sites)
302        common.logger.debug(5,"sites are (after white list) %s"%sites)
366  
367 <        if len(sites)==0:
368 <            msg = 'No sites hosting all the needed data! Exiting... '
369 <            raise CrabException(msg)
367 >        sites = dataloc.getSites()
368 >        allSites = []
369 >        listSites = sites.values()
370 >        for listSite in listSites:
371 >            for oneSite in listSite:
372 >                allSites.append(oneSite)
373 >        allSites = self.uniquelist(allSites)
374  
375 <        common.logger.message("List of Sites hosting the data : "+str(sites))
376 <        common.logger.debug(6, "List of Sites: "+str(sites))
377 <        common.analisys_common_info['sites']=sites    ## used in SchedulerEdg.py in createSchScript
378 <        self.setParam_('TargetCE', ','.join(sites))
312 <        return
375 >        # screen output
376 >        common.logger.message("Requested dataset: " + datasetPath + " has " + str(self.maxEvents) + " events in " + str(len(self.filesbyblock.keys())) + " blocks.\n")
377 >
378 >        return sites
379      
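DataDiscoveryAndLocation above returns the per-block site lists from DLS; the flattening and uniquelist() step collapses them into one site list for the screen output. A tiny sketch with hypothetical blocks and sites:

# hypothetical DLS result: block name -> hosting sites
sites = {"/Primary/Proc#block1": ["T2_IT_Bari", "T2_US_UCSD"],
         "/Primary/Proc#block2": ["T2_US_UCSD"]}

all_sites = []
for site_list in sites.values():
    for site in site_list:
        if site not in all_sites:      # what uniquelist() does above
            all_sites.append(site)
# all_sites -> ['T2_IT_Bari', 'T2_US_UCSD'] (order may vary)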
380 <    def jobSplittingPerFiles(self):
315 <        """
316 <        Perform job splitting based on number of files to be accessed per job
380 >    def jobSplittingByBlocks(self, blockSites):
381          """
382 <        common.logger.debug(5,'Splitting per input files')
383 <        common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
384 <        common.logger.message('Available '+str(self.maxEvents)+' events in total ')
385 <        common.logger.message('Required '+str(self.filesPerJob)+' files per job ')
386 <        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
387 <        common.logger.message('Required '+str(self.eventsPerJob)+' events per job')
388 <
389 <        ## if asked to process all events, do it
390 <        if self.total_number_of_events == -1:
391 <            self.total_number_of_events=self.maxEvents
382 >        Perform job splitting. Jobs run over an integer number of files
383 >        and no more than one block.
384 >        ARGUMENT: blockSites: dictionary with blocks as keys and list of host sites as values
385 >        REQUIRES: self.selectTotalNumberEvents, self.selectEventsPerJob, self.selectNumberOfJobs,
386 >                  self.total_number_of_events, self.eventsPerJob, self.theNumberOfJobs,
387 >                  self.maxEvents, self.filesbyblock
388 >        SETS: self.jobDestination - Site destination(s) for each job (a list of lists)
389 >              self.total_number_of_jobs - Total # of jobs
390 >              self.list_of_args - File(s) job will run on (a list of lists)
391 >        """
392 >
393 >        # ---- Handle the possible job splitting configurations ---- #
394 >        if (self.selectTotalNumberEvents):
395 >            totalEventsRequested = self.total_number_of_events
396 >        if (self.selectEventsPerJob):
397 >            eventsPerJobRequested = self.eventsPerJob
398 >            if (self.selectNumberOfJobs):
399 >                totalEventsRequested = self.theNumberOfJobs * self.eventsPerJob
400 >
401 >        # If user requested all the events in the dataset
402 >        if (totalEventsRequested == -1):
403 >            eventsRemaining=self.maxEvents
404 >        # If user requested more events than are in the dataset
405 >        elif (totalEventsRequested > self.maxEvents):
406 >            eventsRemaining = self.maxEvents
407 >            common.logger.message("Requested "+str(self.total_number_of_events)+ " events, but only "+str(self.maxEvents)+" events are available.")
408 >        # If user requested less events than are in the dataset
409          else:
410 <            if self.total_number_of_events>self.maxEvents:
330 <                common.logger.message("Asked "+str(self.total_number_of_events)+" but only "+str(self.maxEvents)+" available.")
331 <                self.total_number_of_events=self.maxEvents
332 <            pass
410 >            eventsRemaining = totalEventsRequested
411  
412 <        ## TODO: SL need to have (from DBS) a detailed list of how many events per each file
413 <        n_tot_files = (len(self.files[0]))
414 <        ## SL: this is wrong if the files have different number of events
337 <        evPerFile = int(self.maxEvents)/n_tot_files
338 <
339 <        common.logger.debug(5,'Events per File '+str(evPerFile))
340 <
341 <        ## compute job splitting parameters: filesPerJob, eventsPerJob and theNumberOfJobs
342 <        if self.selectFilesPerJob:
343 <            ## user define files per event.
344 <            filesPerJob = self.filesPerJob
345 <            eventsPerJob = filesPerJob*evPerFile
346 <            theNumberOfJobs = int(self.total_number_of_events*1./eventsPerJob)
347 <            check = int(self.total_number_of_events) - (theNumberOfJobs*eventsPerJob)
348 <            if check > 0:
349 <                theNumberOfJobs +=1
350 <                filesLastJob = int(check*1./evPerFile+0.5)
351 <                common.logger.message('Warning: last job will be created with '+str(check)+' files')
352 <            else:
353 <                filesLastJob = filesPerJob
412 >        # If user requested more events per job than are in the dataset
413 >        if (self.selectEventsPerJob and eventsPerJobRequested > self.maxEvents):
414 >            eventsPerJobRequested = self.maxEvents
415  
416 <        elif self.selectNumberOfJobs:
417 <            ## User select the number of jobs: last might be bigger to match request of events
357 <            theNumberOfJobs =  self.theNumberOfJobs
358 <
359 <            eventsPerJob = self.total_number_of_events/theNumberOfJobs
360 <            filesPerJob = int(eventsPerJob/evPerFile)
361 <            if (filesPerJob==0) : filesPerJob=1
362 <            check = int(self.total_number_of_events) - (int(theNumberOfJobs)*filesPerJob*evPerFile)
363 <            if not check == 0:
364 <                if check<0:
365 <                    missingFiles = int(check/evPerFile)
366 <                    additionalJobs = int(missingFiles/filesPerJob)
367 <                    #print missingFiles, additionalJobs
368 <                    theNumberOfJobs+=additionalJobs
369 <                    common.logger.message('Warning: will create only '+str(theNumberOfJobs)+' jobs')
370 <                    check = int(self.total_number_of_events) - (int(theNumberOfJobs)*filesPerJob*evPerFile)
371 <                    
372 <                if check >0 :
373 <                    filesLastJob = filesPerJob+int(check*1./evPerFile+0.5)
374 <                    common.logger.message('Warning: last job will be created with '+str(filesLastJob*evPerFile)+' events')
375 <                else:
376 <                    filesLastJob = filesPerJob
377 <            else:
378 <                filesLastJob = filesPerJob
379 <        elif self.selectEventsPerJob:
380 <            # SL case if asked events per job
381 <            ## estimate the number of files per job to match the user requirement
382 <            filesPerJob = int(float(self.eventsPerJob)/float(evPerFile))
383 <            if filesPerJob==0: filesPerJob=1
384 <            common.logger.debug(5,"filesPerJob "+str(filesPerJob))
385 <            if (filesPerJob==0): filesPerJob=1
386 <            eventsPerJob=filesPerJob*evPerFile
387 <            theNumberOfJobs = int(self.total_number_of_events)/int(eventsPerJob)
388 <            check = int(self.total_number_of_events) - (int(theNumberOfJobs)*eventsPerJob)
389 <            if not check == 0:
390 <                missingFiles = int(check/evPerFile)
391 <                additionalJobs = int(missingFiles/filesPerJob)
392 <                if ( additionalJobs>0) : theNumberOfJobs+=additionalJobs
393 <                check = int(self.total_number_of_events) - (int(theNumberOfJobs)*eventsPerJob)
394 <                if not check == 0:
395 <                    if (check <0 ):
396 <                        filesLastJob = filesPerJob+int(check*1./evPerFile-0.5)
397 <                    else:
398 <                        theNumberOfJobs+=1
399 <                        filesLastJob = int(check*1./evPerFile+0.5)
416 >        # For user info at end
417 >        totalEventCount = 0
418  
419 <                    common.logger.message('Warning: last job will be created with '+str(filesLastJob*evPerFile)+' events')
420 <                else:
403 <                    filesLastJob = filesPerJob
404 <            else:
405 <                filesLastJob = filesPerJob
406 <        
407 <        self.total_number_of_jobs = theNumberOfJobs
419 >        if (self.selectTotalNumberEvents and self.selectNumberOfJobs):
420 >            eventsPerJobRequested = int(eventsRemaining/self.theNumberOfJobs)
421  
422 <        totalEventsToBeUsed=theNumberOfJobs*filesPerJob*evPerFile
423 <        if not check == 0:
411 <        #    print (theNumberOfJobs-1)*filesPerJob*evPerFile,filesLastJob*evPerFile
412 <            totalEventsToBeUsed=(theNumberOfJobs-1)*filesPerJob*evPerFile+filesLastJob*evPerFile
422 >        if (self.selectNumberOfJobs):
423 >            common.logger.message("May not create the exact number_of_jobs requested.")
424  
425 <        common.logger.message(str(self.total_number_of_jobs)+' jobs will be created, each for '+str(filesPerJob*evPerFile)+' events, for a total of '+str(totalEventsToBeUsed)+' events')
425 >        if ( self.ncjobs == 'all' ) :
426 >            totalNumberOfJobs = 999999999
427 >        else :
428 >            totalNumberOfJobs = self.ncjobs
429 >            
430  
431 <        totalFilesToBeUsed=filesPerJob*(theNumberOfJobs-1)+filesLastJob
431 >        blocks = blockSites.keys()
432 >        blockCount = 0
433 >        # Backup variable in case self.maxEvents counted events in a non-included block
434 >        numBlocksInDataset = len(blocks)
435  
436 <        ## set job arguments (files)
436 >        jobCount = 0
437          list_of_lists = []
438 <        lastFile=0
439 <        for i in range(0, int(totalFilesToBeUsed), filesPerJob)[:-1]:
440 <            parString = "\\{"
438 >
439 >        # list tracking which jobs belong to which block
440 >        jobsOfBlock = {}
441 >
442 >        # ---- Iterate over the blocks in the dataset until ---- #
443 >        # ---- we've met the requested total # of events    ---- #
444 >        while ( (eventsRemaining > 0) and (blockCount < numBlocksInDataset) and (jobCount < totalNumberOfJobs)):
445 >            block = blocks[blockCount]
446 >            blockCount += 1
447 >            if block not in jobsOfBlock.keys() :
448 >                jobsOfBlock[block] = []
449              
450 <            lastFile=i+filesPerJob
451 <            params = self.files[0][i: lastFile]
452 <            for i in range(len(params) - 1):
427 <                parString += '\\\"' + params[i] + '\\\"\,'
450 >            if self.eventsbyblock.has_key(block) :
451 >                numEventsInBlock = self.eventsbyblock[block]
452 >                common.logger.debug(5,'Events in Block File '+str(numEventsInBlock))
453              
454 <            parString += '\\\"' + params[len(params) - 1] + '\\\"\\}'
455 <            list_of_lists.append([parString])
456 <            pass
457 <
458 <        ## last job
459 <        parString = "\\{"
460 <        
461 <        params = self.files[0][lastFile: lastFile+filesLastJob]
462 <        for i in range(len(params) - 1):
463 <            parString += '\\\"' + params[i] + '\\\"\,'
454 >                files = self.filesbyblock[block]
455 >                numFilesInBlock = len(files)
456 >                if (numFilesInBlock <= 0):
457 >                    continue
458 >                fileCount = 0
459 >
460 >                # ---- New block => New job ---- #
461 >                parString = "\\{"
462 >                # counter for number of events in files currently worked on
463 >                filesEventCount = 0
464 >                # flag if next while loop should touch new file
465 >                newFile = 1
466 >                # job event counter
467 >                jobSkipEventCount = 0
468 >            
469 >                # ---- Iterate over the files in the block until we've met the requested ---- #
470 >                # ---- total # of events or we've gone over all the files in this block  ---- #
471 >                while ( (eventsRemaining > 0) and (fileCount < numFilesInBlock) and (jobCount < totalNumberOfJobs) ):
472 >                    file = files[fileCount]
473 >                    if newFile :
474 >                        try:
475 >                            numEventsInFile = self.eventsbyfile[file]
476 >                            common.logger.debug(6, "File "+str(file)+" has "+str(numEventsInFile)+" events")
477 >                            # increase filesEventCount
478 >                            filesEventCount += numEventsInFile
479 >                            # Add file to current job
480 >                            parString += '\\\"' + file + '\\\"\,'
481 >                            newFile = 0
482 >                        except KeyError:
483 >                            common.logger.message("File "+str(file)+" has unknown number of events: skipping")
484 >                        
485 >
486 >                    # if less events in file remain than eventsPerJobRequested
487 >                    if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested ) :
488 >                        # if last file in block
489 >                        if ( fileCount == numFilesInBlock-1 ) :
490 >                            # end job using last file, use remaining events in block
491 >                            # close job and touch new file
492 >                            fullString = parString[:-2]
493 >                            fullString += '\\}'
494 >                            list_of_lists.append([fullString,str(-1),str(jobSkipEventCount)])
495 >                            common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(filesEventCount - jobSkipEventCount)+" events (last file in block).")
496 >                            self.jobDestination.append(blockSites[block])
497 >                            common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
498 >                            # fill jobs of block dictionary
499 >                            jobsOfBlock[block].append(jobCount+1)
500 >                            # reset counter
501 >                            jobCount = jobCount + 1
502 >                            totalEventCount = totalEventCount + filesEventCount - jobSkipEventCount
503 >                            eventsRemaining = eventsRemaining - filesEventCount + jobSkipEventCount
504 >                            jobSkipEventCount = 0
505 >                            # reset file
506 >                            parString = "\\{"
507 >                            filesEventCount = 0
508 >                            newFile = 1
509 >                            fileCount += 1
510 >                        else :
511 >                            # go to next file
512 >                            newFile = 1
513 >                            fileCount += 1
514 >                    # if events in file equal to eventsPerJobRequested
515 >                    elif ( filesEventCount - jobSkipEventCount == eventsPerJobRequested ) :
516 >                        # close job and touch new file
517 >                        fullString = parString[:-2]
518 >                        fullString += '\\}'
519 >                        list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
520 >                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
521 >                        self.jobDestination.append(blockSites[block])
522 >                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
523 >                        jobsOfBlock[block].append(jobCount+1)
524 >                        # reset counter
525 >                        jobCount = jobCount + 1
526 >                        totalEventCount = totalEventCount + eventsPerJobRequested
527 >                        eventsRemaining = eventsRemaining - eventsPerJobRequested
528 >                        jobSkipEventCount = 0
529 >                        # reset file
530 >                        parString = "\\{"
531 >                        filesEventCount = 0
532 >                        newFile = 1
533 >                        fileCount += 1
534 >                        
535 >                    # if more events in file remain than eventsPerJobRequested
536 >                    else :
537 >                        # close job but don't touch new file
538 >                        fullString = parString[:-2]
539 >                        fullString += '\\}'
540 >                        list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
541 >                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
542 >                        self.jobDestination.append(blockSites[block])
543 >                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
544 >                        jobsOfBlock[block].append(jobCount+1)
545 >                        # increase counter
546 >                        jobCount = jobCount + 1
547 >                        totalEventCount = totalEventCount + eventsPerJobRequested
548 >                        eventsRemaining = eventsRemaining - eventsPerJobRequested
549 >                        # calculate skip events for last file
550 >                        # use filesEventCount (contains several files), jobSkipEventCount and eventsPerJobRequested
551 >                        jobSkipEventCount = eventsPerJobRequested - (filesEventCount - jobSkipEventCount - self.eventsbyfile[file])
552 >                        # remove all but the last file
553 >                        filesEventCount = self.eventsbyfile[file]
554 >                        parString = "\\{"
555 >                        parString += '\\\"' + file + '\\\"\,'
556 >                    pass # END if
557 >                pass # END while (iterate over files in the block)
558 >        pass # END while (iterate over blocks in the dataset)
559 >        self.ncjobs = self.total_number_of_jobs = jobCount
560 >        if (eventsRemaining > 0 and jobCount < totalNumberOfJobs ):
561 >            common.logger.message("Could not run on all requested events because some blocks not hosted at allowed sites.")
562 >        common.logger.message(str(jobCount)+" job(s) can run on "+str(totalEventCount)+" events.\n")
563          
564 <        parString += '\\\"' + params[len(params) - 1] + '\\\"\\}'
565 <        list_of_lists.append([parString])
566 <        pass
564 >        # screen output
565 >        screenOutput = "List of jobs and available destination sites:\n\n"
566 >
567 >        blockCounter = 0
568 >        for block in blocks:
569 >            if block in jobsOfBlock.keys() :
570 >                blockCounter += 1
571 >                screenOutput += "Block %5i: jobs %20s: sites: %s\n" % (blockCounter,spanRanges(jobsOfBlock[block]),','.join(blockSites[block]))
572 >
573 >        common.logger.message(screenOutput)
574  
575          self.list_of_args = list_of_lists
445        # print self.list_of_args[0]
576          return
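jobSplittingByBlocks packs whole files into jobs; when a file holds more events than the current job still needs, it is carried into the next job together with a skip-event offset, and each job argument ends up as (file list, maxEvents, skipEvents). The following is only a standalone sketch of that bookkeeping for one block, with invented file names and event counts:

# hypothetical per-file event counts for one block, as DBS would report them
events_by_file = {"file_A.root": 300, "file_B.root": 500, "file_C.root": 200}
files = ["file_A.root", "file_B.root", "file_C.root"]
events_per_job = 400

jobs = []                                  # (file list, maxEvents, skipEvents)
current, count, skip = [], 0, 0
i, new_file = 0, True
while i < len(files):
    f = files[i]
    if new_file:
        current.append(f)
        count += events_by_file[f]
        new_file = False
    if count - skip < events_per_job:
        if i == len(files) - 1:            # last file: job takes the remainder
            jobs.append((current, -1, skip))
            break
        i, new_file = i + 1, True          # otherwise just open the next file
    elif count - skip == events_per_job:   # exact fit: close job, reset skip
        jobs.append((current, events_per_job, skip))
        current, count, skip = [], 0, 0
        i, new_file = i + 1, True
    else:                                  # file spills over: close the job and
        jobs.append((current, events_per_job, skip))
        skip = events_per_job - (count - skip - events_by_file[f])
        current, count = [f], events_by_file[f]   # reuse the file with a skip

# jobs -> [(['file_A.root', 'file_B.root'], 400, 0),
#          (['file_B.root'],                400, 100),
#          (['file_C.root'],                 -1, 0)]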
577  
578      def jobSplittingNoInput(self):
# Line 459 | Line 589 | class Cmssw(JobType):
589              raise CrabException(msg)
590  
591          if (self.selectEventsPerJob):
592 <            self.total_number_of_jobs = int(self.total_number_of_events/self.eventsPerJob)
592 >            if (self.selectTotalNumberEvents):
593 >                self.total_number_of_jobs = int(self.total_number_of_events/self.eventsPerJob)
594 >            elif(self.selectNumberOfJobs) :  
595 >                self.total_number_of_jobs =self.theNumberOfJobs
596 >                self.total_number_of_events =int(self.theNumberOfJobs*self.eventsPerJob)
597 >
598          elif (self.selectNumberOfJobs) :
599              self.total_number_of_jobs = self.theNumberOfJobs
600              self.eventsPerJob = int(self.total_number_of_events/self.total_number_of_jobs)
601 <
601 >
602          common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
603  
604          # is there any remainder?
# Line 471 | Line 606 | class Cmssw(JobType):
606  
607          common.logger.debug(5,'Check  '+str(check))
608  
609 <        common.logger.message(str(self.total_number_of_jobs)+' jobs will be created, each for '+str(self.eventsPerJob)+' for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
609 >        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created, each for '+str(self.eventsPerJob)+' events, for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
610          if check > 0:
611 <            common.logger.message('Warning: asked '+str(self.total_number_of_events)+' but will do only '+str(int(self.total_number_of_jobs)*self.eventsPerJob))
477 <
611 >            common.logger.message('Warning: asked '+str(self.total_number_of_events)+' but can do only '+str(int(self.total_number_of_jobs)*self.eventsPerJob))
612  
613          # argument is seed number.$i
614          self.list_of_args = []
615          for i in range(self.total_number_of_jobs):
616 <            if (self.sourceSeed):
617 <                self.list_of_args.append([(str(self.sourceSeed)+str(i))])
616 >            ## Since there is no input, any site is good
617 >           # self.jobDestination.append(["Any"])
618 >            self.jobDestination.append([""]) #must be empty to write correctly the xml
619 >            args=[]
620 >            if (self.firstRun):
621 >                    ## pythia first run
622 >                #self.list_of_args.append([(str(self.firstRun)+str(i))])
623 >                args.append(str(self.firstRun)+str(i))
624              else:
625 <                self.list_of_args.append([str(i)])
626 <        #print self.list_of_args
625 >                ## no first run
626 >                #self.list_of_args.append([str(i)])
627 >                args.append(str(i))
628 >            if (self.sourceSeed):
629 >                args.append(str(self.sourceSeed)+str(i))
630 >                if (self.sourceSeedVtx):
631 >                    ## + vtx random seed
632 >                    args.append(str(self.sourceSeedVtx)+str(i))
633 >                if (self.sourceSeedG4):
634 >                    ## + G4 random seed
635 >                    args.append(str(self.sourceSeedG4)+str(i))
636 >                if (self.sourceSeedMix):    
637 >                    ## + Mix random seed
638 >                    args.append(str(self.sourceSeedMix)+str(i))
639 >                pass
640 >            pass
641 >            self.list_of_args.append(args)
642 >        pass
643 >            
644 >        # print self.list_of_args
645 >
646 >        return
647 >
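For generator-only jobs (no input dataset) the per-job arguments above are produced by appending the job index to the configured first-run and seed values as strings, so every job gets distinct values. A minimal sketch with hypothetical seeds:

# hypothetical configured values
first_run, pythia_seed, vtx_seed = 1000, 123, 456
n_jobs = 3

list_of_args = []
for i in range(n_jobs):
    args = [str(first_run) + str(i)]        # '10000', '10001', '10002'
    args.append(str(pythia_seed) + str(i))  # '1230', '1231', '1232'
    args.append(str(vtx_seed) + str(i))     # '4560', '4561', '4562'
    list_of_args.append(args)
# list_of_args[1] -> ['10001', '1231', '4561']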
648 >
649 >    def jobSplittingForScript(self):#CarlosDaniele
650 >        """
651 >        Perform job splitting based on number of jobs
652 >        """
653 >        common.logger.debug(5,'Splitting per job')
654 >        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
655 >
656 >        self.total_number_of_jobs = self.theNumberOfJobs
657 >
658 >        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
659  
660 +        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created')
661 +
662 +        # argument is seed number.$i
663 +        self.list_of_args = []
664 +        for i in range(self.total_number_of_jobs):
665 +            ## Since there is no input, any site is good
666 +           # self.jobDestination.append(["Any"])
667 +            self.jobDestination.append([""])
668 +            ## no random seed
669 +            self.list_of_args.append([str(i)])
670          return
671  
672      def split(self, jobParams):
# Line 502 | Line 684 | class Cmssw(JobType):
684              # print str(arglist[job])
685              # print jobParams[job]
686              common.jobDB.setArguments(job, jobParams[job])
687 +            common.logger.debug(5,"Job "+str(job)+" Destination: "+str(self.jobDestination[job]))
688 +            common.jobDB.setDestination(job, self.jobDestination[job])
689  
690          common.jobDB.save()
691          return
# Line 516 | Line 700 | class Cmssw(JobType):
700          # Fabio
701          return self.total_number_of_jobs
702  
519    def checkBlackList(self, allSites):
520        if len(self.reCEBlackList)==0: return allSites
521        sites = []
522        for site in allSites:
523            common.logger.debug(10,'Site '+site)
524            good=1
525            for re in self.reCEBlackList:
526                if re.search(site):
527                    common.logger.message('CE in black list, skipping site '+site)
528                    good=0
529                pass
530            if good: sites.append(site)
531        if len(sites) == 0:
532            common.logger.debug(3,"No sites found after BlackList")
533        return sites
534
535    def checkWhiteList(self, allSites):
536
537        if len(self.reCEWhiteList)==0: return allSites
538        sites = []
539        for site in allSites:
540            good=0
541            for re in self.reCEWhiteList:
542                if re.search(site):
543                    common.logger.debug(5,'CE in white list, adding site '+site)
544                    good=1
545                if not good: continue
546                sites.append(site)
547        if len(sites) == 0:
548            common.logger.message("No sites found after WhiteList\n")
549        else:
550            common.logger.debug(5,"Selected sites via WhiteList are "+str(sites)+"\n")
551        return sites
552
703      def getTarBall(self, exe):
704          """
705          Return the TarBall with lib and exe
706          """
707          
708          # if it exist, just return it
709 <        self.tgzNameWithPath = common.work_space.shareDir()+self.tgz_name
709 >        #
710 >        # Marco. Let's start to use relative path for Boss XML files
711 >        #
712 >        self.tgzNameWithPath = common.work_space.pathForTgz()+'share/'+self.tgz_name
713          if os.path.exists(self.tgzNameWithPath):
714              return self.tgzNameWithPath
715  
# Line 570 | Line 723 | class Cmssw(JobType):
723          # First of all declare the user Scram area
724          swArea = self.scram.getSWArea_()
725          #print "swArea = ", swArea
726 <        swVersion = self.scram.getSWVersion()
727 <        #print "swVersion = ", swVersion
726 >        # swVersion = self.scram.getSWVersion()
727 >        # print "swVersion = ", swVersion
728          swReleaseTop = self.scram.getReleaseTop_()
729          #print "swReleaseTop = ", swReleaseTop
730          
# Line 579 | Line 732 | class Cmssw(JobType):
732          if swReleaseTop == '' or swArea == swReleaseTop:
733              return
734  
735 <        filesToBeTarred = []
736 <        ## First find the executable
737 <        if (self.executable != ''):
738 <            exeWithPath = self.scram.findFile_(executable)
739 < #           print exeWithPath
740 <            if ( not exeWithPath ):
741 <                raise CrabException('User executable '+executable+' not found')
742 <
743 <            ## then check if it's private or not
744 <            if exeWithPath.find(swReleaseTop) == -1:
745 <                # the exe is private, so we must ship
746 <                common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
747 <                path = swArea+'/'
748 <                exe = string.replace(exeWithPath, path,'')
749 <                filesToBeTarred.append(exe)
750 <                pass
751 <            else:
752 <                # the exe is from release, we'll find it on WN
753 <                pass
754 <
755 <        ## Now get the libraries: only those in local working area
756 <        libDir = 'lib'
757 <        lib = swArea+'/' +libDir
758 <        common.logger.debug(5,"lib "+lib+" to be tarred")
759 <        if os.path.exists(lib):
760 <            filesToBeTarred.append(libDir)
761 <
762 <        ## Now check if module dir is present
763 <        moduleDir = 'module'
764 <        if os.path.isdir(swArea+'/'+moduleDir):
765 <            filesToBeTarred.append(moduleDir)
766 <
767 <        ## Now check if the Data dir is present
768 <        dataDir = 'src/Data/'
769 <        if os.path.isdir(swArea+'/'+dataDir):
770 <            filesToBeTarred.append(dataDir)
771 <
772 <        ## Create the tar-ball
773 <        if len(filesToBeTarred)>0:
774 <            cwd = os.getcwd()
775 <            os.chdir(swArea)
776 <            tarcmd = 'tar zcvf ' + self.tgzNameWithPath + ' '
777 <            for line in filesToBeTarred:
778 <                tarcmd = tarcmd + line + ' '
779 <            cout = runCommand(tarcmd)
780 <            if not cout:
781 <                raise CrabException('Could not create tar-ball')
782 <            os.chdir(cwd)
783 <        else:
784 <            common.logger.debug(5,"No files to be to be tarred")
735 >        import tarfile
736 >        try: # create tar ball
737 >            tar = tarfile.open(self.tgzNameWithPath, "w:gz")
738 >            ## First find the executable
739 >            if (self.executable != ''):
740 >                exeWithPath = self.scram.findFile_(executable)
741 >                if ( not exeWithPath ):
742 >                    raise CrabException('User executable '+executable+' not found')
743 >    
744 >                ## then check if it's private or not
745 >                if exeWithPath.find(swReleaseTop) == -1:
746 >                    # the exe is private, so we must ship
747 >                    common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
748 >                    path = swArea+'/'
749 >                    # distinguish case when script is in user project area or given by full path somewhere else
750 >                    if exeWithPath.find(path) >= 0 :
751 >                        exe = string.replace(exeWithPath, path,'')
752 >                        tar.add(path+exe,os.path.basename(executable))
753 >                    else :
754 >                        tar.add(exeWithPath,os.path.basename(executable))
755 >                    pass
756 >                else:
757 >                    # the exe is from release, we'll find it on WN
758 >                    pass
759 >    
760 >            ## Now get the libraries: only those in local working area
761 >            libDir = 'lib'
762 >            lib = swArea+'/' +libDir
763 >            common.logger.debug(5,"lib "+lib+" to be tarred")
764 >            if os.path.exists(lib):
765 >                tar.add(lib,libDir)
766 >    
767 >            ## Now check if module dir is present
768 >            moduleDir = 'module'
769 >            module = swArea + '/' + moduleDir
770 >            if os.path.isdir(module):
771 >                tar.add(module,moduleDir)
772 >
773 >            ## Now check if any data dir(s) is present
774 >            swAreaLen=len(swArea)
775 >            for root, dirs, files in os.walk(swArea):
776 >                if "data" in dirs:
777 >                    common.logger.debug(5,"data "+root+"/data"+" to be tarred")
778 >                    tar.add(root+"/data",root[swAreaLen:]+"/data")
779 >
780 >            ## Add ProdAgent dir to tar
781 >            paDir = 'ProdAgentApi'
782 >            pa = os.environ['CRABDIR'] + '/' + 'ProdAgentApi'
783 >            if os.path.isdir(pa):
784 >                tar.add(pa,paDir)
785 >
786 >            ### FEDE FOR DBS PUBLICATION
787 >            ## Add PRODCOMMON dir to tar
788 >            prodcommonDir = 'ProdCommon'
789 >            prodcommonPath = os.environ['CRABDIR'] + '/' + 'ProdCommon'
790 >            if os.path.isdir(prodcommonPath):
791 >                tar.add(prodcommonPath,prodcommonDir)
792 >            #############################    
793 >        
794 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
795 >            tar.close()
796 >        except :
797 >            raise CrabException('Could not create tar-ball')
798 >
799 >        ## check for tarball size
800 >        tarballinfo = os.stat(self.tgzNameWithPath)
801 >        if ( tarballinfo.st_size > self.MaxTarBallSize*1024*1024 ) :
802 >            raise CrabException('Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) + ' MB input sandbox limit and not supported by the used GRID submission system. Please make sure that no unnecessary files are in all data directories in your local CMSSW project area as they are automatically packed into the input sandbox.')
803 >
804 >        ## create tar-ball with ML stuff
805 >        self.MLtgzfile =  common.work_space.pathForTgz()+'share/MLfiles.tgz'
806 >        try:
807 >            tar = tarfile.open(self.MLtgzfile, "w:gz")
808 >            path=os.environ['CRABDIR'] + '/python/'
809 >            for file in ['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py', 'parseCrabFjr.py']:
810 >                tar.add(path+file,file)
811 >            common.logger.debug(5,"Files added to "+self.MLtgzfile+" : "+str(tar.getnames()))
812 >            tar.close()
813 >        except :
814 >            raise CrabException('Could not create ML files tar-ball')
815          
816          return
817          
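getTarBall above packs the private parts of the SCRAM project area (lib, module, and any data directories found by walking the tree) with the tarfile module, then enforces the EDG.maxtarballsize limit via os.stat. A condensed sketch of that pattern, with hypothetical paths and the 9.5 MB default:

import os, tarfile

sw_area = "/path/to/CMSSW_X_Y_Z"         # hypothetical project area
tgz_path = "default.tgz"
max_mb = 9.5                             # default limit used above

tar = tarfile.open(tgz_path, "w:gz")
for sub in ("lib", "module"):
    full = os.path.join(sw_area, sub)
    if os.path.isdir(full):
        tar.add(full, sub)               # store under a relative archive name
# pick up any 'data' directory anywhere under the project area
for root, dirs, files in os.walk(sw_area):
    if "data" in dirs:
        tar.add(os.path.join(root, "data"), root[len(sw_area):] + "/data")
tar.close()

if os.stat(tgz_path).st_size > max_mb * 1024 * 1024:
    raise RuntimeError("Input sandbox larger than %s MB limit" % max_mb)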
818 +    def additionalInputFileTgz(self):
819 +        """
820 +        Put all additional files into a tar ball and return its name
821 +        """
822 +        import tarfile
823 +        tarName=  common.work_space.pathForTgz()+'share/'+self.additional_tgz_name
824 +        tar = tarfile.open(tarName, "w:gz")
825 +        for file in self.additional_inbox_files:
826 +            tar.add(file,string.split(file,'/')[-1])
827 +        common.logger.debug(5,"Files added to "+self.additional_tgz_name+" : "+str(tar.getnames()))
828 +        tar.close()
829 +        return tarName
830 +
831      def wsSetupEnvironment(self, nj):
832          """
833          Returns part of a job script which prepares
# Line 642 | Line 838 | class Cmssw(JobType):
838    
839          ## OLI_Daniele at this level  middleware already known
840  
841 +        txt += 'echo "### Firtst set SCRAM ARCH and BUILD_ARCH ###"\n'
842 +        txt += 'echo "Setting SCRAM_ARCH='+self.executable_arch+'"\n'
843 +        txt += 'export SCRAM_ARCH='+self.executable_arch+'\n'
844 +        txt += 'export BUILD_ARCH='+self.executable_arch+'\n'
845          txt += 'if [ $middleware == LCG ]; then \n'
846          txt += self.wsSetupCMSLCGEnvironment_()
847          txt += 'elif [ $middleware == OSG ]; then\n'
848 <        txt += '    time=`date -u +"%s"`\n'
849 <        txt += '    WORKING_DIR=$OSG_WN_TMP/cms_$time\n'
650 <        txt += '    echo "Creating working directory: $WORKING_DIR"\n'
651 <        txt += '    /bin/mkdir -p $WORKING_DIR\n'
848 >        txt += '    WORKING_DIR=`/bin/mktemp  -d $OSG_WN_TMP/cms_XXXXXXXXXXXX`\n'
849 >        txt += '    echo "Created working directory: $WORKING_DIR"\n'
850          txt += '    if [ ! -d $WORKING_DIR ] ;then\n'
851          txt += '        echo "SET_CMS_ENV 10016 ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
852 <        txt += '        echo "JOB_EXIT_STATUS = 10016"\n'
853 <        txt += '        echo "JobExitCode=10016" | tee -a $RUNTIME_AREA/$repo\n'
854 <        txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
852 >        txt += '    echo "JOB_EXIT_STATUS = 10016"\n'
853 >        txt += '    echo "JobExitCode=10016" | tee -a $RUNTIME_AREA/$repo\n'
854 >        txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
855          txt += '        rm -f $RUNTIME_AREA/$repo \n'
856          txt += '        echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
857          txt += '        echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
# Line 685 | Line 883 | class Cmssw(JobType):
883          txt += '        cd $RUNTIME_AREA\n'
884          txt += '        /bin/rm -rf $WORKING_DIR\n'
885          txt += '        if [ -d $WORKING_DIR ] ;then\n'
886 <        txt += '            echo "SET_CMS_ENV 10018 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after CMSSW CMSSW_0_6_1 not found on `hostname`"\n'
887 <        txt += '            echo "JOB_EXIT_STATUS = 10018"\n'
888 <        txt += '            echo "JobExitCode=10018" | tee -a $RUNTIME_AREA/$repo\n'
889 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
886 >        txt += '            echo "SET_CMS_ENV 10018 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after CMSSW CMSSW_0_6_1 not found on `hostname`"\n'
887 >        txt += '            echo "JOB_EXIT_STATUS = 10018"\n'
888 >        txt += '            echo "JobExitCode=10018" | tee -a $RUNTIME_AREA/$repo\n'
889 >        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
890          txt += '            rm -f $RUNTIME_AREA/$repo \n'
891          txt += '            echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
892          txt += '            echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
# Line 698 | Line 896 | class Cmssw(JobType):
896          txt += 'fi \n'
897          txt += 'echo "CMSSW_VERSION =  '+self.version+'"\n'
898          txt += 'cd '+self.version+'\n'
899 +        ########## FEDE FOR DBS2 ######################
900 +        txt += 'SOFTWARE_DIR=`pwd`\n'
901 +        txt += 'echo SOFTWARE_DIR=$SOFTWARE_DIR \n'
902 +        ###############################################
903          ### needed grep for bug in scramv1 ###
904 +        txt += scram+' runtime -sh\n'
905          txt += 'eval `'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME`\n'
906 +        txt += 'echo $PATH\n'
907  
908          # Handle the arguments:
909          txt += "\n"
910          txt += "## number of arguments (first argument always jobnumber)\n"
911          txt += "\n"
912 <        txt += "narg=$#\n"
913 <        txt += "if [ $narg -lt 2 ]\n"
912 > #        txt += "narg=$#\n"
913 >        txt += "if [ $nargs -lt 2 ]\n"
914          txt += "then\n"
915 <        txt += "    echo 'SET_EXE_ENV 1 ==> ERROR Too few arguments' +$narg+ \n"
915 >        txt += "    echo 'SET_EXE_ENV 1 ==> ERROR Too few arguments' +$nargs+ \n"
916          txt += '    echo "JOB_EXIT_STATUS = 50113"\n'
917          txt += '    echo "JobExitCode=50113" | tee -a $RUNTIME_AREA/$repo\n'
918          txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
# Line 721 | Line 925 | class Cmssw(JobType):
925          txt += '        cd $RUNTIME_AREA\n'
926          txt += '        /bin/rm -rf $WORKING_DIR\n'
927          txt += '        if [ -d $WORKING_DIR ] ;then\n'
928 <        txt += '            echo "SET_EXE_ENV 50114 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Too few arguments for CRAB job wrapper"\n'
929 <        txt += '            echo "JOB_EXIT_STATUS = 50114"\n'
930 <        txt += '            echo "JobExitCode=50114" | tee -a $RUNTIME_AREA/$repo\n'
931 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
928 >        txt += '            echo "SET_EXE_ENV 50114 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Too few arguments for CRAB job wrapper"\n'
929 >        txt += '            echo "JOB_EXIT_STATUS = 50114"\n'
930 >        txt += '            echo "JobExitCode=50114" | tee -a $RUNTIME_AREA/$repo\n'
931 >        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
932          txt += '            rm -f $RUNTIME_AREA/$repo \n'
933          txt += '            echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
934          txt += '            echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
# Line 736 | Line 940 | class Cmssw(JobType):
940  
941          # Prepare job-specific part
942          job = common.job_list[nj]
943 <        pset = os.path.basename(job.configFilename())
944 <        txt += '\n'
945 <        if (self.datasetPath): # standard job
946 <            txt += 'InputFiles=$2\n'
947 <            txt += 'echo "Inputfiles:<$InputFiles>"\n'
948 <            txt += 'sed "s#{\'INPUT\'}#$InputFiles#" $RUNTIME_AREA/'+pset+' > pset.cfg\n'
949 <        else:  # pythia like job
950 <            if (self.sourceSeed):
951 <                txt += 'Seed=$2\n'
952 <                txt += 'echo "Seed: <$Seed>"\n'
953 <                txt += 'sed "s#INPUT#$Seed#" $RUNTIME_AREA/'+pset+' > pset.cfg\n'
943 >        ### FEDE FOR DBS OUTPUT PUBLICATION
944 >        if (self.datasetPath):
945 >            txt += '\n'
946 >            txt += 'DatasetPath='+self.datasetPath+'\n'
947 >
948 >            datasetpath_split = self.datasetPath.split("/")
949 >            
950 >            txt += 'PrimaryDataset='+datasetpath_split[1]+'\n'
951 >            txt += 'DataTier='+datasetpath_split[2]+'\n'
952 >            #txt += 'ProcessedDataset='+datasetpath_split[3]+'\n'
953 >            txt += 'ApplicationFamily=cmsRun\n'
954 >
955 >        else:
956 >            txt += 'DatasetPath=MCDataTier\n'
957 >            txt += 'PrimaryDataset=null\n'
958 >            txt += 'DataTier=null\n'
959 >            #txt += 'ProcessedDataset=null\n'
960 >            txt += 'ApplicationFamily=MCDataTier\n'
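# A quick illustration of the split used just above to fill PrimaryDataset and
# DataTier in the job wrapper.  The dataset name is hypothetical; because the path
# starts with '/', element 0 of the split is empty and the first two path
# components end up at indices 1 and 2.
datasetpath = "/PrimaryDS/TIER/ProcessedDS"     # hypothetical
parts = datasetpath.split("/")                  # ['', 'PrimaryDS', 'TIER', 'ProcessedDS']
assert parts[1] == "PrimaryDS"                  # -> PrimaryDataset
assert parts[2] == "TIER"                       # -> DataTier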
961 >        if self.pset != None: #CarlosDaniele
962 >            pset = os.path.basename(job.configFilename())
963 >            txt += '\n'
964 >            txt += 'cp  $RUNTIME_AREA/'+pset+' .\n'
965 >            if (self.datasetPath): # standard job
966 >                #txt += 'InputFiles=$2\n'
967 >                txt += 'InputFiles=${args[1]}\n'
968 >                txt += 'MaxEvents=${args[2]}\n'
969 >                txt += 'SkipEvents=${args[3]}\n'
970 >                txt += 'echo "Inputfiles:<$InputFiles>"\n'
971 >                txt += 'sed "s#{\'INPUT\'}#$InputFiles#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
972 >                txt += 'echo "MaxEvents:<$MaxEvents>"\n'
973 >                txt += 'sed "s#INPUTMAXEVENTS#$MaxEvents#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
974 >                txt += 'echo "SkipEvents:<$SkipEvents>"\n'
975 >                txt += 'sed "s#INPUTSKIPEVENTS#$SkipEvents#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
976 >            else:  # pythia like job
977 >                seedIndex=1
978 >                if (self.firstRun):
979 >                    txt += 'FirstRun=${args['+str(seedIndex)+']}\n'
980 >                    txt += 'echo "FirstRun: <$FirstRun>"\n'
981 >                    txt += 'sed "s#\<INPUTFIRSTRUN\>#$FirstRun#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
982 >                    seedIndex=seedIndex+1
983 >
984 >                if (self.sourceSeed):
985 >                    txt += 'Seed=${args['+str(seedIndex)+']}\n'
986 >                    txt += 'sed "s#\<INPUT\>#$Seed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
987 >                    seedIndex=seedIndex+1
988 >                    ## the following seeds are not always present
989 >                    if (self.sourceSeedVtx):
990 >                        txt += 'VtxSeed=${args['+str(seedIndex)+']}\n'
991 >                        txt += 'echo "VtxSeed: <$VtxSeed>"\n'
992 >                        txt += 'sed "s#\<INPUTVTX\>#$VtxSeed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
993 >                        seedIndex += 1
994 >                    if (self.sourceSeedG4):
995 >                        txt += 'G4Seed=${args['+str(seedIndex)+']}\n'
996 >                        txt += 'echo "G4Seed: <$G4Seed>"\n'
997 >                        txt += 'sed "s#\<INPUTG4\>#$G4Seed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
998 >                        seedIndex += 1
999 >                    if (self.sourceSeedMix):
1000 >                        txt += 'mixSeed=${args['+str(seedIndex)+']}\n'
1001 >                        txt += 'echo "MixSeed: <$mixSeed>"\n'
1002 >                        txt += 'sed "s#\<INPUTMIX\>#$mixSeed#" '+pset+' > tmp && mv -f tmp '+pset+'\n'
1003 >                        seedIndex += 1
1004 >                    pass
1005 >                pass
1006 >            txt += 'mv -f '+pset+' pset.cfg\n'
1007  
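# A sketch of the positional-argument bookkeeping used above for jobs without a
# datasetpath: seedIndex walks through ${args[...]} in the fixed order FirstRun,
# Seed, VtxSeed, G4Seed, MixSeed, advancing only for values that are actually
# configured, and the extra seeds are only considered when a primary seed is set.
# The boolean flags below are hypothetical stand-ins for the self.* attributes;
# args[0] carries the job number (the wrapper's first argument).
def seed_argument_indices(firstRun=False, sourceSeed=False, vtx=False, g4=False, mix=False):
    indices = {}
    idx = 1
    if firstRun:
        indices['FirstRun'] = idx
        idx += 1
    if sourceSeed:
        indices['Seed'] = idx
        idx += 1
        if vtx:
            indices['VtxSeed'] = idx
            idx += 1
        if g4:
            indices['G4Seed'] = idx
            idx += 1
        if mix:
            indices['MixSeed'] = idx
            idx += 1
    return indices

# seed_argument_indices(sourceSeed=True, vtx=True) -> {'Seed': 1, 'VtxSeed': 2}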
1008          if len(self.additional_inbox_files) > 0:
1009 <            for file in self.additional_inbox_files:
1010 <                txt += 'if [ -e $RUNTIME_AREA/'+file+' ] ; then\n'
1011 <                txt += '   cp $RUNTIME_AREA/'+file+' .\n'
755 <                txt += '   chmod +x '+file+'\n'
756 <                txt += 'fi\n'
1009 >            txt += 'if [ -e $RUNTIME_AREA/'+self.additional_tgz_name+' ] ; then\n'
1010 >            txt += '  tar xzvf $RUNTIME_AREA/'+self.additional_tgz_name+'\n'
1011 >            txt += 'fi\n'
1012              pass
1013  
1014 <        txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
1015 <
1016 <        txt += '\n'
1017 <        txt += 'echo "***** cat pset.cfg *********"\n'
1018 <        txt += 'cat pset.cfg\n'
1019 <        txt += 'echo "****** end pset.cfg ********"\n'
1020 <        txt += '\n'
1021 <        # txt += 'echo "***** cat pset1.cfg *********"\n'
1022 <        # txt += 'cat pset1.cfg\n'
1023 <        # txt += 'echo "****** end pset1.cfg ********"\n'
1014 >        if self.pset != None: #CarlosDaniele
1015 >            txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
1016 >        
1017 >            txt += '\n'
1018 >            txt += 'echo "***** cat pset.cfg *********"\n'
1019 >            txt += 'cat pset.cfg\n'
1020 >            txt += 'echo "****** end pset.cfg ********"\n'
1021 >            txt += '\n'
1022 >            ### FEDE FOR DBS OUTPUT PUBLICATION
1023 >            txt += 'PSETHASH=`EdmConfigHash < pset.cfg` \n'
1024 >            txt += 'echo "PSETHASH = $PSETHASH" \n'
1025 >            ##############
1026 >            txt += '\n'
1027 >            # txt += 'echo "***** cat pset1.cfg *********"\n'
1028 >            # txt += 'cat pset1.cfg\n'
1029 >            # txt += 'echo "****** end pset1.cfg ********"\n'
1030          return txt
1031  
1032 <    def wsBuildExe(self, nj):
1032 >    def wsBuildExe(self, nj=0):
1033          """
1034          Put in the script the commands to build an executable
1035          or a library.
# Line 803 | Line 1064 | class Cmssw(JobType):
1064              txt += 'else \n'
1065              txt += '   echo "Successful untar" \n'
1066              txt += 'fi \n'
1067 +            txt += '\n'
1068 +            txt += 'echo "Include ProdAgentApi and PRODCOMMON in PYTHONPATH"\n'
1069 +            txt += 'if [ -z "$PYTHONPATH" ]; then\n'
1070 +            #### FEDE FOR DBS OUTPUT PUBLICATION
1071 +            txt += '   export PYTHONPATH=$SOFTWARE_DIR/ProdAgentApi:$SOFTWARE_DIR/ProdCommon\n'
1072 +            #txt += '   export PYTHONPATH=`pwd`/ProdAgentApi:`pwd`/ProdCommon\n'
1073 +            #txt += '   export PYTHONPATH=ProdAgentApi\n'
1074 +            txt += 'else\n'
1075 +            txt += '   export PYTHONPATH=$SOFTWARE_DIR/ProdAgentApi:$SOFTWARE_DIR/ProdCommon:${PYTHONPATH}\n'
1076 +            #txt += '   export PYTHONPATH=`pwd`/ProdAgentApi:`pwd`/ProdCommon:${PYTHONPATH}\n'
1077 +            #txt += '   export PYTHONPATH=ProdAgentApi:${PYTHONPATH}\n'
1078 +            txt += 'echo "PYTHONPATH=$PYTHONPATH"\n'
1079 +            ###################  
1080 +            txt += 'fi\n'
1081 +            txt += '\n'
1082 +
1083              pass
1084          
1085          return txt
# Line 814 | Line 1091 | class Cmssw(JobType):
1091          """
1092          
1093      def executableName(self):
1094 <        return self.executable
1094 >        if self.scriptExe: #CarlosDaniele
1095 >            return "sh "
1096 >        else:
1097 >            return self.executable
1098  
1099      def executableArgs(self):
1100 <        return " -p pset.cfg"
1100 >        if self.scriptExe:#CarlosDaniele
1101 >            return   self.scriptExe + " $NJob"
1102 >        else:
1103 >            # if >= CMSSW_1_5_X, add -e
1104 >            version_array = self.scram.getSWVersion().split('_')
1105 >            major = 0
1106 >            minor = 0
1107 >            try:
1108 >                major = int(version_array[1])
1109 >                minor = int(version_array[2])
1110 >            except:
1111 >                msg = "Cannot parse CMSSW version string: " + "_".join(version_array) + " for major and minor release number!"  
1112 >                raise CrabException(msg)
1113 >            if major > 1 or (major == 1 and minor >= 5) :
1114 >                return " -e -p pset.cfg"
1115 >            else:
1116 >                return " -p pset.cfg"
1117  
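# A small self-contained sketch of the version test in executableArgs() above: the
# "-e" option is added from CMSSW_1_5_X onwards, per the comment in the method.
# The version strings used here are only examples.
def cmsrun_args(sw_version):
    parts = sw_version.split('_')            # e.g. ['CMSSW', '1', '6', '7']
    major, minor = int(parts[1]), int(parts[2])
    if major > 1 or (major == 1 and minor >= 5):
        return " -e -p pset.cfg"
    return " -p pset.cfg"

assert cmsrun_args("CMSSW_1_3_1") == " -p pset.cfg"
assert cmsrun_args("CMSSW_1_6_7") == " -e -p pset.cfg"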
1118      def inputSandbox(self, nj):
1119          """
1120          Returns a list of filenames to be put in JDL input sandbox.
1121          """
1122          inp_box = []
1123 <        # dict added to delete duplicate from input sandbox file list
1124 <        seen = {}
1123 >        # # dict added to delete duplicate from input sandbox file list
1124 >        # seen = {}
1125          ## code
1126          if os.path.isfile(self.tgzNameWithPath):
1127              inp_box.append(self.tgzNameWithPath)
1128 +        if os.path.isfile(self.MLtgzfile):
1129 +            inp_box.append(self.MLtgzfile)
1130          ## config
1131 <        inp_box.append(common.job_list[nj].configFilename())
1131 >        if not self.pset is None:
1132 >            inp_box.append(common.work_space.pathForTgz() + 'job/' + self.configFilename())
1133          ## additional input files
1134 <        #for file in self.additional_inbox_files:
1135 <        #    inp_box.append(common.work_space.cwdDir()+file)
1134 >        tgz = self.additionalInputFileTgz()
1135 >        inp_box.append(tgz)
1136          return inp_box
1137  
1138      def outputSandbox(self, nj):
# Line 842 | Line 1141 | class Cmssw(JobType):
1141          """
1142          out_box = []
1143  
845        stdout=common.job_list[nj].stdout()
846        stderr=common.job_list[nj].stderr()
847
1144          ## User Declared output files
1145 <        for out in self.output_file:
1145 >        for out in (self.output_file+self.output_file_sandbox):
1146              n_out = nj + 1
1147              out_box.append(self.numberFile_(out,str(n_out)))
1148          return out_box
853        return []
1149  
1150      def prepareSteeringCards(self):
1151          """
# Line 866 | Line 1161 | class Cmssw(JobType):
1161          txt = '\n'
1162          txt += '# directory content\n'
1163          txt += 'ls \n'
1164 <        file_list = ''
1165 <        for fileWithSuffix in self.output_file:
1164 >
1165 >        for fileWithSuffix in (self.output_file+self.output_file_sandbox):
1166              output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
872            file_list=file_list+output_file_num+' '
1167              txt += '\n'
1168              txt += '# check output file\n'
1169 <            txt += 'ls '+fileWithSuffix+'\n'
1170 <            txt += 'ls_result=$?\n'
1171 <            #txt += 'exe_result=$?\n'
1172 <            txt += 'if [ $ls_result -ne 0 ] ; then\n'
1173 <            txt += '   echo "ERROR: Problem with output file"\n'
1174 <            #txt += '   echo "JOB_EXIT_STATUS = $exe_result"\n'
1175 <            #txt += '   echo "JobExitCode=60302" | tee -a $RUNTIME_AREA/$repo\n'
882 <            #txt += '   dumpStatus $RUNTIME_AREA/$repo\n'
883 <            ### OLI_DANIELE
1169 >            # txt += 'ls '+fileWithSuffix+'\n'
1170 >            # txt += 'ls_result=$?\n'
1171 >            txt += 'if [ -e ./'+fileWithSuffix+' ] ; then\n'
1172 >            txt += '   mv '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1173 >            txt += 'else\n'
1174 >            txt += '   exit_status=60302\n'
1175 >            txt += '   echo "ERROR: Problem with output file '+fileWithSuffix+'"\n'
1176              if common.scheduler.boss_scheduler_name == 'condor_g':
1177                  txt += '    if [ $middleware == OSG ]; then \n'
1178                  txt += '        echo "prepare dummy output file"\n'
1179                  txt += '        echo "Processing of job output failed" > $RUNTIME_AREA/'+output_file_num+'\n'
1180                  txt += '    fi \n'
889            txt += 'else\n'
890            txt += '   cp '+fileWithSuffix+' $RUNTIME_AREA/'+output_file_num+'\n'
1181              txt += 'fi\n'
1182 +        file_list = []
1183 +        for fileWithSuffix in (self.output_file):
1184 +             file_list.append(self.numberFile_(fileWithSuffix, '$NJob'))
1185 +        txt += 'file_list="'+string.join(file_list,' ')+'"\n'
1186        
1187          txt += 'cd $RUNTIME_AREA\n'
1188 <        file_list=file_list[:-1]
895 <        txt += 'file_list="'+file_list+'"\n'
896 <        txt += 'cd $RUNTIME_AREA\n'
1188 >        #### FEDE this is the cleanEnv function
1189          ### OLI_DANIELE
1190 <        txt += 'if [ $middleware == OSG ]; then\n'  
1191 <        txt += '    cd $RUNTIME_AREA\n'
1192 <        txt += '    echo "Remove working directory: $WORKING_DIR"\n'
1193 <        txt += '    /bin/rm -rf $WORKING_DIR\n'
1194 <        txt += '    if [ -d $WORKING_DIR ] ;then\n'
1195 <        txt += '        echo "SET_EXE 60999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after cleanup of WN"\n'
1196 <        txt += '        echo "JOB_EXIT_STATUS = 60999"\n'
1197 <        txt += '        echo "JobExitCode=60999" | tee -a $RUNTIME_AREA/$repo\n'
1198 <        txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
1199 <        txt += '        rm -f $RUNTIME_AREA/$repo \n'
1200 <        txt += '        echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1201 <        txt += '        echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1202 <        txt += '    fi\n'
1203 <        txt += 'fi\n'
1204 <        txt += '\n'
1190 >        #txt += 'if [ $middleware == OSG ]; then\n'  
1191 >        #txt += '    cd $RUNTIME_AREA\n'
1192 >        #txt += '    echo "Remove working directory: $WORKING_DIR"\n'
1193 >        #txt += '    /bin/rm -rf $WORKING_DIR\n'
1194 >        #txt += '    if [ -d $WORKING_DIR ] ;then\n'
1195 >        #txt += '        echo "SET_EXE 60999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after cleanup of WN"\n'
1196 >        #txt += '        echo "JOB_EXIT_STATUS = 60999"\n'
1197 >        #txt += '        echo "JobExitCode=60999" | tee -a $RUNTIME_AREA/$repo\n'
1198 >        #txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
1199 >        #txt += '        rm -f $RUNTIME_AREA/$repo \n'
1200 >        #txt += '        echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1201 >        #txt += '        echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1202 >        #txt += '    fi\n'
1203 >        #txt += 'fi\n'
1204 >        #txt += '\n'
1205 >
1206 >
1207          return txt
1208  
1209      def numberFile_(self, file, txt):
# Line 920 | Line 1214 | class Cmssw(JobType):
1214          # take away last extension
1215          name = p[0]
1216          for x in p[1:-1]:
1217 <           name=name+"."+x
1217 >            name=name+"."+x
1218          # add "_txt"
1219          if len(p)>1:
1220 <          ext = p[len(p)-1]
1221 <          #result = name + '_' + str(txt) + "." + ext
928 <          result = name + '_' + txt + "." + ext
1220 >            ext = p[len(p)-1]
1221 >            result = name + '_' + txt + "." + ext
1222          else:
1223 <          #result = name + '_' + str(txt)
931 <          result = name + '_' + txt
1223 >            result = name + '_' + txt
1224          
1225          return result
1226  
1227 <    def getRequirements(self):
1227 >    def getRequirements(self, nj=[]):
1228          """
1229          return job requirements to add to jdl files
1230          """
1231          req = ''
1232 <        if common.analisys_common_info['sw_version']:
1232 >        if self.version:
1233              req='Member("VO-cms-' + \
1234 <                 common.analisys_common_info['sw_version'] + \
1234 >                 self.version + \
1235                   '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1236 <        if common.analisys_common_info['sites']:
1237 <            if len(common.analisys_common_info['sites'])>0:
1238 <                req = req + ' && ('
1239 <                for i in range(len(common.analisys_common_info['sites'])):
1240 <                    req = req + 'other.GlueCEInfoHostName == "' \
1241 <                         + common.analisys_common_info['sites'][i] + '"'
1242 <                    if ( i < (int(len(common.analisys_common_info['sites']) - 1)) ):
1243 <                        req = req + ' || '
1244 <            req = req + ')'
953 <        #print "req = ", req
1236 >        ## SL add requirement for OS version only if SL4
1237 >        reSL4 = re.compile( r'slc4' )
1238 >        if self.executable_arch and reSL4.search(self.executable_arch):
1239 >            req+=' && Member("VO-cms-' + \
1240 >                 self.executable_arch + \
1241 >                 '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1242 >
1243 >        req = req + ' && (other.GlueHostNetworkAdapterOutboundIP)'
1244 >
1245          return req
1246  
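# A sketch of the JDL requirement string assembled by getRequirements() for a
# hypothetical CMSSW version and an slc4 architecture; the Member()/GlueHost*
# expressions are the ones written literally in the method above (a plain
# substring test replaces the re.compile(r'slc4') check for brevity).
def sample_requirements(version="CMSSW_1_6_7", arch="slc4_ia32_gcc345"):
    req = 'Member("VO-cms-%s", other.GlueHostApplicationSoftwareRunTimeEnvironment)' % version
    if arch and 'slc4' in arch:
        req += ' && Member("VO-cms-%s", other.GlueHostApplicationSoftwareRunTimeEnvironment)' % arch
    req += ' && (other.GlueHostNetworkAdapterOutboundIP)'
    return req

# sample_requirements() returns the full expression that ends up in the JDL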
1247      def configFilename(self):
# Line 967 | Line 1258 | class Cmssw(JobType):
1258          txt += '   echo "### SETUP CMS OSG  ENVIRONMENT ###"\n'
1259          txt += '   if [ -f $GRID3_APP_DIR/cmssoft/cmsset_default.sh ] ;then\n'
1260          txt += '      # Use $GRID3_APP_DIR/cmssoft/cmsset_default.sh to setup cms software\n'
1261 +        txt += '       export SCRAM_ARCH='+self.executable_arch+'\n'
1262          txt += '       source $GRID3_APP_DIR/cmssoft/cmsset_default.sh '+self.version+'\n'
1263 <        txt += '   elif [ -f $OSG_APP/cmssoft/cmsset_default.sh ] ;then\n'
1264 <        txt += '      # Use $OSG_APP/cmssoft/cmsset_default.sh to setup cms software\n'
1265 <        txt += '       source $OSG_APP/cmssoft/cmsset_default.sh '+self.version+'\n'
1263 >        txt += '   elif [ -f $OSG_APP/cmssoft/cms/cmsset_default.sh ] ;then\n'
1264 >        txt += '      # Use $OSG_APP/cmssoft/cms/cmsset_default.sh to setup cms software\n'
1265 >        txt += '       export SCRAM_ARCH='+self.executable_arch+'\n'
1266 >        txt += '       source $OSG_APP/cmssoft/cms/cmsset_default.sh '+self.version+'\n'
1267          txt += '   else\n'
1268 <        txt += '       echo "SET_CMS_ENV 10020 ==> ERROR $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cmsset_default.sh file not found"\n'
1268 >        txt += '       echo "SET_CMS_ENV 10020 ==> ERROR $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1269          txt += '       echo "JOB_EXIT_STATUS = 10020"\n'
1270          txt += '       echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1271          txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
# Line 985 | Line 1278 | class Cmssw(JobType):
1278          txt += '       cd $RUNTIME_AREA\n'
1279          txt += '       /bin/rm -rf $WORKING_DIR\n'
1280          txt += '       if [ -d $WORKING_DIR ] ;then\n'
1281 <        txt += '            echo "SET_CMS_ENV 10017 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cmsset_default.sh file not found"\n'
1282 <        txt += '            echo "JOB_EXIT_STATUS = 10017"\n'
1283 <        txt += '            echo "JobExitCode=10017" | tee -a $RUNTIME_AREA/$repo\n'
1284 <        txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
1285 <        txt += '            rm -f $RUNTIME_AREA/$repo \n'
1286 <        txt += '            echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1287 <        txt += '            echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1281 >        txt += '           echo "SET_CMS_ENV 10017 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1282 >        txt += '           echo "JOB_EXIT_STATUS = 10017"\n'
1283 >        txt += '           echo "JobExitCode=10017" | tee -a $RUNTIME_AREA/$repo\n'
1284 >        txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
1285 >        txt += '           rm -f $RUNTIME_AREA/$repo \n'
1286 >        txt += '           echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1287 >        txt += '           echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1288          txt += '       fi\n'
1289          txt += '\n'
1290          txt += '       exit 1\n'
# Line 1046 | Line 1339 | class Cmssw(JobType):
1339          txt += '       fi\n'
1340          txt += '   fi\n'
1341          txt += '   \n'
1049        txt += '   string=`cat /etc/redhat-release`\n'
1050        txt += '   echo $string\n'
1051        txt += '   if [[ $string = *alhalla* ]]; then\n'
1052        txt += '       echo "SCRAM_ARCH= $SCRAM_ARCH"\n'
1053        txt += '   elif [[ $string = *Enterprise* ]] || [[ $string = *cientific* ]]; then\n'
1054        txt += '       export SCRAM_ARCH=slc3_ia32_gcc323\n'
1055        txt += '       echo "SCRAM_ARCH= $SCRAM_ARCH"\n'
1056        txt += '   else\n'
1057        txt += '       echo "SET_CMS_ENV 10033 ==> ERROR OS unknown, LCG environment not initialized"\n'
1058        txt += '       echo "JOB_EXIT_STATUS = 10033"\n'
1059        txt += '       echo "JobExitCode=10033" | tee -a $RUNTIME_AREA/$repo\n'
1060        txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
1061        txt += '       rm -f $RUNTIME_AREA/$repo \n'
1062        txt += '       echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1063        txt += '       echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1064        txt += '       exit 1\n'
1065        txt += '   fi\n'
1342          txt += '   echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
1343          txt += '   echo "### END SETUP CMS LCG ENVIRONMENT ###"\n'
1344          return txt
1345  
1346 +    ### FEDE FOR DBS OUTPUT PUBLICATION
1347 +    def modifyReport(self, nj):
1348 +        """
1349 +        insert the part of the script that modifies the FrameworkJob Report
1350 +        """
1351 +
1352 +        txt = ''
1353 +        txt += 'echo "Modify Job Report" \n'
1354 +        #txt += 'chmod a+x $RUNTIME_AREA/'+self.version+'/ProdAgentApi/FwkJobRep/ModifyJobReport.py\n'
1355 +        ################ FEDE FOR DBS2 #############################################
1356 +        txt += 'chmod a+x $SOFTWARE_DIR/ProdAgentApi/FwkJobRep/ModifyJobReport.py\n'
1357 +        #############################################################################
1358 +        try:
1359 +            publish_data = int(self.cfg_params['USER.publish_data'])          
1360 +        except KeyError:
1361 +            publish_data = 0
1362 +
1363 +        txt += 'if [ -z "$SE" ]; then\n'
1364 +        txt += '    SE="" \n'
1365 +        txt += 'fi \n'
1366 +        txt += 'if [ -z "$SE_PATH" ]; then\n'
1367 +        txt += '    SE_PATH="" \n'
1368 +        txt += 'fi \n'
1369 +        txt += 'echo "SE = $SE"\n'
1370 +        txt += 'echo "SE_PATH = $SE_PATH"\n'
1371 +
1372 +        if (publish_data == 1):  
1373 +            #processedDataset = self.cfg_params['USER.processed_datasetname']
1374 +            processedDataset = self.cfg_params['USER.publish_data_name']
1375 +            txt += 'ProcessedDataset='+processedDataset+'\n'
1376 +            #### LFN=/store/user/<user>/processedDataset_PSETHASH
1377 +            txt += 'if [ "$SE_PATH" == "" ]; then\n'
1378 +            #### FEDE: added slash in LFN ##############
1379 +            txt += '    FOR_LFN=/copy_problems/ \n'
1380 +            txt += 'else \n'
1381 +            txt += '    tmp=`echo $SE_PATH | awk -F \'store\' \'{print$2}\'` \n'
1382 +            #####  FEDE TO BE CHANGED, BECAUSE STORE IS HARDCODED!!!! ########
1383 +            txt += '    FOR_LFN=/store$tmp \n'
1384 +            txt += 'fi \n'
1385 +            txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1386 +            txt += 'echo "FOR_LFN = $FOR_LFN" \n'
1387 +            txt += 'echo "CMSSW_VERSION = $CMSSW_VERSION"\n\n'
1388 +            #txt += 'echo "$RUNTIME_AREA/'+self.version+'/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
1389 +            txt += 'echo "$SOFTWARE_DIR/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH"\n'
1390 +            txt += '$SOFTWARE_DIR/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
1391 +            #txt += '$RUNTIME_AREA/'+self.version+'/ProdAgentApi/FwkJobRep/ModifyJobReport.py crab_fjr_$NJob.xml $NJob $FOR_LFN $PrimaryDataset $DataTier $ProcessedDataset $ApplicationFamily $executable $CMSSW_VERSION $PSETHASH $SE $SE_PATH\n'
1392 +      
1393 +            txt += 'modifyReport_result=$?\n'
1394 +            txt += 'echo modifyReport_result = $modifyReport_result\n'
1395 +            txt += 'if [ $modifyReport_result -ne 0 ]; then\n'
1396 +            txt += '    exit_status=1\n'
1397 +            txt += '    echo "ERROR: Problem with ModifyJobReport"\n'
1398 +            txt += 'else\n'
1399 +            txt += '    mv NewFrameworkJobReport.xml crab_fjr_$NJob.xml\n'
1400 +            txt += 'fi\n'
1401 +        else:
1402 +            txt += 'ProcessedDataset=no_data_to_publish \n'
1403 +            #### FEDE: added slash in LFN ##############
1404 +            txt += 'FOR_LFN=/local/ \n'
1405 +            txt += 'echo "ProcessedDataset = $ProcessedDataset"\n'
1406 +            txt += 'echo "FOR_LFN = $FOR_LFN" \n'
1407 +        return txt
1408 +
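# A Python rendering of the shell/awk logic in modifyReport() above that derives
# FOR_LFN from SE_PATH when publication is enabled.  The example path is
# hypothetical, and the method itself already flags the hard-coded 'store'
# delimiter as something to be changed.
def for_lfn(se_path):
    if not se_path:
        return '/copy_problems/'
    fields = se_path.split('store')        # awk -F 'store'
    if len(fields) > 1:
        tail = fields[1]                   # awk '{print $2}'
    else:
        tail = ''
    return '/store' + tail

# for_lfn('/pnfs/site.example/data/cms/store/user/someuser/') -> '/store/user/someuser/'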
1409 +    def cleanEnv(self):
1410 +        ### OLI_DANIELE
1411 +        txt = ''
1412 +        txt += 'if [ $middleware == OSG ]; then\n'  
1413 +        txt += '    cd $RUNTIME_AREA\n'
1414 +        txt += '    echo "Remove working directory: $WORKING_DIR"\n'
1415 +        txt += '    /bin/rm -rf $WORKING_DIR\n'
1416 +        txt += '    if [ -d $WORKING_DIR ] ;then\n'
1417 +        txt += '        echo "SET_EXE 60999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after cleanup of WN"\n'
1418 +        txt += '        echo "JOB_EXIT_STATUS = 60999"\n'
1419 +        txt += '        echo "JobExitCode=60999" | tee -a $RUNTIME_AREA/$repo\n'
1420 +        txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
1421 +        txt += '        rm -f $RUNTIME_AREA/$repo \n'
1422 +        txt += '        echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1423 +        txt += '        echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1424 +        txt += '    fi\n'
1425 +        txt += 'fi\n'
1426 +        txt += '\n'
1427 +        return txt
1428 +
1429      def setParam_(self, param, value):
1430          self._params[param] = value
1431  
# Line 1078 | Line 1437 | class Cmssw(JobType):
1437          
1438      def getTaskid(self):
1439          return self._taskId
1440 +
1441 +    def uniquelist(self, old):
1442 +        """
1443 +        remove duplicates from a list
1444 +        """
1445 +        nd={}
1446 +        for e in old:
1447 +            nd[e]=0
1448 +        return nd.keys()

Diff Legend

- Removed lines
+ Added lines
< Changed lines (old revision)
> Changed lines (new revision)