ViewVC Help
View File | Revision Log | Show Annotations | Root Listing
root/cvsroot/COMP/CRAB/python/cms_cmssw.py
(Generate patch)

Comparing COMP/CRAB/python/cms_cmssw.py (file contents):
Revision 1.10 by slacapra, Thu Jun 22 16:05:18 2006 UTC vs.
Revision 1.73 by gutsche, Sun Apr 8 18:39:51 2007 UTC

# Line 4 | Line 4 | from crab_exceptions import *
4   from crab_util import *
5   import common
6   import PsetManipulator  
7 <
8 < import DBSInfo_EDM
9 < import DataDiscovery_EDM
10 < import DataLocation_EDM
7 > import DataDiscovery
8 > import DataDiscovery_DBS2
9 > import DataLocation
10   import Scram
11  
12 < import os, string, re
12 > import os, string, re, shutil, glob
13  
14   class Cmssw(JobType):
15 <    def __init__(self, cfg_params):
15 >    def __init__(self, cfg_params, ncjobs):
16          JobType.__init__(self, 'CMSSW')
17          common.logger.debug(3,'CMSSW::__init__')
18  
20        self.analisys_common_info = {}
19          # Marco.
20          self._params = {}
21          self.cfg_params = cfg_params
22 +
23 +        try:
24 +            self.MaxTarBallSize = float(self.cfg_params['EDG.maxtarballsize'])
25 +        except KeyError:
26 +            self.MaxTarBallSize = 100.0
27 +
28 +        # number of jobs requested to be created, limit obj splitting
29 +        self.ncjobs = ncjobs
30 +
31          log = common.logger
32          
33          self.scram = Scram.Scram(cfg_params)
27        scramArea = ''
34          self.additional_inbox_files = []
35          self.scriptExe = ''
36          self.executable = ''
37 +        self.executable_arch = self.scram.getArch()
38          self.tgz_name = 'default.tgz'
39 +        self.scriptName = 'CMSSW.sh'
40 +        self.pset = ''      #script use case Da  
41 +        self.datasetPath = '' #script use case Da
42  
43 +        # set FJR file name
44 +        self.fjrFileName = 'crab_fjr.xml'
45  
46          self.version = self.scram.getSWVersion()
47 +        common.taskDB.setDict('codeVersion',self.version)
48          self.setParam_('application', self.version)
36        common.analisys_common_info['sw_version'] = self.version
37        ### FEDE
38        common.analisys_common_info['copy_input_data'] = 0
39        common.analisys_common_info['events_management'] = 1
49  
50          ### collect Data cards
51 +
52 +        ## get DBS mode
53 +        try:
54 +            self.use_dbs_2 = int(self.cfg_params['CMSSW.use_dbs_2'])
55 +        except KeyError:
56 +            self.use_dbs_2 = 0
57 +            
58          try:
59              tmp =  cfg_params['CMSSW.datasetpath']
60              log.debug(6, "CMSSW::CMSSW(): datasetPath = "+tmp)
61              if string.lower(tmp)=='none':
62                  self.datasetPath = None
63 +                self.selectNoInput = 1
64              else:
65                  self.datasetPath = tmp
66 +                self.selectNoInput = 0
67          except KeyError:
68              msg = "Error: datasetpath not defined "  
69              raise CrabException(msg)
# Line 82 | Line 100 | class Cmssw(JobType):
100          try:
101              self.pset = cfg_params['CMSSW.pset']
102              log.debug(6, "Cmssw::Cmssw(): PSet file = "+self.pset)
103 <            if (not os.path.exists(self.pset)):
104 <                raise CrabException("User defined PSet file "+self.pset+" does not exist")
103 >            if self.pset.lower() != 'none' :
104 >                if (not os.path.exists(self.pset)):
105 >                    raise CrabException("User defined PSet file "+self.pset+" does not exist")
106 >            else:
107 >                self.pset = None
108          except KeyError:
109              raise CrabException("PSet file missing. Cannot run cmsRun ")
110  
111          # output files
112 +        ## stuff which must be returned always via sandbox
113 +        self.output_file_sandbox = []
114 +
115 +        # add fjr report by default via sandbox
116 +        self.output_file_sandbox.append(self.fjrFileName)
117 +
118 +        # other output files to be returned via sandbox or copied to SE
119          try:
120              self.output_file = []
93
121              tmp = cfg_params['CMSSW.output_file']
122              if tmp != '':
123                  tmpOutFiles = string.split(cfg_params['CMSSW.output_file'],',')
# Line 100 | Line 127 | class Cmssw(JobType):
127                      self.output_file.append(tmp)
128                      pass
129              else:
130 <                log.message("No output file defined: only stdout/err will be available")
130 >                log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available")
131                  pass
132              pass
133          except KeyError:
134 <            log.message("No output file defined: only stdout/err will be available")
134 >            log.message("No output file defined: only stdout/err and the CRAB Framework Job Report will be available")
135              pass
136  
137          # script_exe file as additional file in inputSandbox
138          try:
139              self.scriptExe = cfg_params['USER.script_exe']
113            self.additional_inbox_files.append(self.scriptExe)
140              if self.scriptExe != '':
141                 if not os.path.isfile(self.scriptExe):
142 <                  msg ="WARNING. file "+self.scriptExe+" not found"
142 >                  msg ="ERROR. file "+self.scriptExe+" not found"
143                    raise CrabException(msg)
144 +               self.additional_inbox_files.append(string.strip(self.scriptExe))
145          except KeyError:
146 <           pass
147 <                  
146 >            self.scriptExe = ''
147 >
148 >        #CarlosDaniele
149 >        if self.datasetPath == None and self.pset == None and self.scriptExe == '' :
150 >           msg ="Error. script_exe  not defined"
151 >           raise CrabException(msg)
152 >
153          ## additional input files
154          try:
155 <            tmpAddFiles = string.split(cfg_params['CMSSW.additional_input_files'],',')
155 >            tmpAddFiles = string.split(cfg_params['USER.additional_input_files'],',')
156              for tmp in tmpAddFiles:
157 <                if not os.path.exists(tmp):
158 <                    raise CrabException("Additional input file not found: "+tmp)
159 <                tmp=string.strip(tmp)
160 <                self.additional_inbox_files.append(tmp)
157 >                tmp = string.strip(tmp)
158 >                dirname = ''
159 >                if not tmp[0]=="/": dirname = "."
160 >                files = glob.glob(os.path.join(dirname, tmp))
161 >                for file in files:
162 >                    if not os.path.exists(file):
163 >                        raise CrabException("Additional input file not found: "+file)
164 >                    pass
165 >                    storedFile = common.work_space.shareDir()+file
166 >                    shutil.copyfile(file, storedFile)
167 >                    self.additional_inbox_files.append(string.strip(storedFile))
168                  pass
169              pass
170 +            common.logger.debug(5,"Additional input files: "+str(self.additional_inbox_files))
171          except KeyError:
172              pass
173  
174          # files per job
175          try:
176 <            self.filesPerJob = int(cfg_params['CMSSW.files_per_jobs']) #Daniele
177 <            self.selectFilesPerJob = 1
176 >            if (cfg_params['CMSSW.files_per_jobs']):
177 >                raise CrabException("files_per_jobs no longer supported.  Quitting.")
178          except KeyError:
179 <            self.filesPerJob = 0
140 <            self.selectFilesPerJob = 0
179 >            pass
180  
181          ## Events per job
182          try:
# Line 147 | Line 186 | class Cmssw(JobType):
186              self.eventsPerJob = -1
187              self.selectEventsPerJob = 0
188      
189 <        # To be implemented
190 <        # ## number of jobs
191 <        # try:
192 <        #     self.numberOfJobs =int( cfg_params['CMSSW.number_of_job'])
193 <        #     self.selectNumberOfJobs = 1
194 <        # except KeyError:
195 <        #     self.selectNumberOfJobs = 0
157 <
158 <        if (self.selectFilesPerJob == self.selectEventsPerJob):
159 <            msg = 'Must define either files_per_jobs or events_per_job'
160 <            raise CrabException(msg)
189 >        ## number of jobs
190 >        try:
191 >            self.theNumberOfJobs =int( cfg_params['CMSSW.number_of_jobs'])
192 >            self.selectNumberOfJobs = 1
193 >        except KeyError:
194 >            self.theNumberOfJobs = 0
195 >            self.selectNumberOfJobs = 0
196  
162        if (self.selectEventsPerJob  and not self.datasetPath == None):
163            msg = 'Splitting according to events_per_job available only with None as datasetpath'
164            raise CrabException(msg)
165    
197          try:
198              self.total_number_of_events = int(cfg_params['CMSSW.total_number_of_events'])
199 +            self.selectTotalNumberEvents = 1
200          except KeyError:
201 <            msg = 'Must define total_number_of_events'
202 <            raise CrabException(msg)
203 <        
204 <        CEBlackList = []
201 >            self.total_number_of_events = 0
202 >            self.selectTotalNumberEvents = 0
203 >
204 >        if self.pset != None: #CarlosDaniele
205 >             if ( (self.selectTotalNumberEvents + self.selectEventsPerJob + self.selectNumberOfJobs) != 2 ):
206 >                 msg = 'Must define exactly two of total_number_of_events, events_per_job, or number_of_jobs.'
207 >                 raise CrabException(msg)
208 >        else:
209 >             if (self.selectNumberOfJobs == 0):
210 >                 msg = 'Must specify  number_of_jobs.'
211 >                 raise CrabException(msg)
212 >
213 >        ## source seed for pythia
214          try:
215 <            tmpBad = string.split(cfg_params['EDG.ce_black_list'],',')
175 <            for tmp in tmpBad:
176 <                tmp=string.strip(tmp)
177 <                CEBlackList.append(tmp)
215 >            self.sourceSeed = int(cfg_params['CMSSW.pythia_seed'])
216          except KeyError:
217 <            pass
217 >            self.sourceSeed = None
218 >            common.logger.debug(5,"No seed given")
219  
220 <        self.reCEBlackList=[]
221 <        for bad in CEBlackList:
183 <            self.reCEBlackList.append(re.compile( bad ))
184 <
185 <        common.logger.debug(5,'CEBlackList: '+str(CEBlackList))
186 <
187 <        CEWhiteList = []
188 <        try:
189 <            tmpGood = string.split(cfg_params['EDG.ce_white_list'],',')
190 <            for tmp in tmpGood:
191 <                tmp=string.strip(tmp)
192 <                CEWhiteList.append(tmp)
220 >        try:
221 >            self.sourceSeedVtx = int(cfg_params['CMSSW.vtx_seed'])
222          except KeyError:
223 <            pass
224 <
225 <        #print 'CEWhiteList: ',CEWhiteList
226 <        self.reCEWhiteList=[]
227 <        for Good in CEWhiteList:
228 <            self.reCEWhiteList.append(re.compile( Good ))
229 <
230 <        common.logger.debug(5,'CEWhiteList: '+str(CEWhiteList))
231 <
203 <        self.PsetEdit = PsetManipulator.PsetManipulator(self.pset) #Daniele Pset
223 >            self.sourceSeedVtx = None
224 >            common.logger.debug(5,"No vertex seed given")
225 >        try:
226 >            self.firstRun = int(cfg_params['CMSSW.first_run'])
227 >        except KeyError:
228 >            self.firstRun = None
229 >            common.logger.debug(5,"No first run given")
230 >        if self.pset != None: #CarlosDaniele
231 >            self.PsetEdit = PsetManipulator.PsetManipulator(self.pset) #Daniele Pset
232  
233          #DBSDLS-start
234          ## Initialize the variables that are extracted from DBS/DLS and needed in other places of the code
235          self.maxEvents=0  # max events available   ( --> check the requested nb. of evts in Creator.py)
236          self.DBSPaths={}  # all dbs paths requested ( --> input to the site local discovery script)
237 +        self.jobDestination=[]  # Site destination(s) for each job (list of lists)
238          ## Perform the data location and discovery (based on DBS/DLS)
239          ## SL: Don't if NONE is specified as input (pythia use case)
240 <        common.analisys_common_info['sites']=None
240 >        blockSites = {}
241          if self.datasetPath:
242 <            self.DataDiscoveryAndLocation(cfg_params)
242 >            blockSites = self.DataDiscoveryAndLocation(cfg_params)
243          #DBSDLS-end          
244  
245          self.tgzNameWithPath = self.getTarBall(self.executable)
217
218        # modify Pset
219        if (self.datasetPath): # standard job
220            self.PsetEdit.maxEvent(self.eventsPerJob) #Daniele  
221            self.PsetEdit.inputModule("INPUT") #Daniele
222
223        else:  # pythia like job
224            self.PsetEdit.maxEvent(self.eventsPerJob) #Daniele  
225            self.PsetEdit.pythiaSeed("INPUT") #Daniele
226            try:
227                self.sourceSeed = int(cfg_params['CMSSW.pythia_seed'])
228            except KeyError:
229                self.sourceSeed = 123456
230                common.logger.message("No seed given, will use "+str(self.sourceSeed))
231        
232        self.PsetEdit.psetWriter(self.configFilename())
246      
247          ## Select Splitting
248 <        if self.selectFilesPerJob: self.jobSplittingPerFiles()
249 <        elif self.selectEventsPerJob: self.jobSplittingPerEvents()
250 <        else:
251 <            msg = 'Don\'t know how to split...'
252 <            raise CrabException(msg)
248 >        if self.selectNoInput:
249 >            if self.pset == None: #CarlosDaniele
250 >                self.jobSplittingForScript()
251 >            else:
252 >                self.jobSplittingNoInput()
253 >        else:
254 >            self.jobSplittingByBlocks(blockSites)
255  
256 +        # modify Pset
257 +        if self.pset != None: #CarlosDaniele
258 +            try:
259 +                if (self.datasetPath): # standard job
260 +                    # allow processing a fraction of events in a file
261 +                    self.PsetEdit.inputModule("INPUT")
262 +                    self.PsetEdit.maxEvent("INPUTMAXEVENTS")
263 +                    self.PsetEdit.skipEvent("INPUTSKIPEVENTS")
264 +                else:  # pythia like job
265 +                    self.PsetEdit.maxEvent(self.eventsPerJob)
266 +                    if (self.firstRun):
267 +                        self.PsetEdit.pythiaFirstRun("INPUTFIRSTRUN")  #First Run
268 +                    if (self.sourceSeed) :
269 +                        self.PsetEdit.pythiaSeed("INPUT")
270 +                        if (self.sourceSeedVtx) :
271 +                            self.PsetEdit.pythiaSeedVtx("INPUTVTX")
272 +                # add FrameworkJobReport to parameter-set
273 +                self.PsetEdit.addCrabFJR(self.fjrFileName)
274 +                self.PsetEdit.psetWriter(self.configFilename())
275 +            except:
276 +                msg='Error while manipulating ParameterSet: exiting...'
277 +                raise CrabException(msg)
278  
279      def DataDiscoveryAndLocation(self, cfg_params):
280  
# Line 245 | Line 282 | class Cmssw(JobType):
282  
283          datasetPath=self.datasetPath
284  
248        ## TODO
249        dataTiersList = ""
250        dataTiers = dataTiersList.split(',')
251
285          ## Contact the DBS
286 +        common.logger.message("Contacting DBS...")
287          try:
288 <            self.pubdata=DataDiscovery_EDM.DataDiscovery_EDM(datasetPath, dataTiers, cfg_params)
288 >
289 >            if self.use_dbs_2 == 1 :
290 >                self.pubdata=DataDiscovery_DBS2.DataDiscovery_DBS2(datasetPath, cfg_params)
291 >            else :
292 >                self.pubdata=DataDiscovery.DataDiscovery(datasetPath, cfg_params)
293              self.pubdata.fetchDBSInfo()
294  
295 <        except DataDiscovery_EDM.NotExistingDatasetError, ex :
295 >        except DataDiscovery.NotExistingDatasetError, ex :
296              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
297              raise CrabException(msg)
298 <
261 <        except DataDiscovery_EDM.NoDataTierinProvenanceError, ex :
298 >        except DataDiscovery.NoDataTierinProvenanceError, ex :
299              msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
300              raise CrabException(msg)
301 <        except DataDiscovery_EDM.DataDiscoveryError, ex:
302 <            msg = 'ERROR ***: failed Data Discovery in DBS  %s'%ex.getErrorMessage()
301 >        except DataDiscovery.DataDiscoveryError, ex:
302 >            msg = 'ERROR ***: failed Data Discovery in DBS :  %s'%ex.getErrorMessage()
303 >            raise CrabException(msg)
304 >        except DataDiscovery_DBS2.NotExistingDatasetError_DBS2, ex :
305 >            msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
306 >            raise CrabException(msg)
307 >        except DataDiscovery_DBS2.NoDataTierinProvenanceError_DBS2, ex :
308 >            msg = 'ERROR ***: failed Data Discovery in DBS : %s'%ex.getErrorMessage()
309 >            raise CrabException(msg)
310 >        except DataDiscovery_DBS2.DataDiscoveryError_DBS2, ex:
311 >            msg = 'ERROR ***: failed Data Discovery in DBS :  %s'%ex.getErrorMessage()
312              raise CrabException(msg)
313  
314          ## get list of all required data in the form of dbs paths  (dbs path = /dataset/datatier/owner)
269        ## self.DBSPaths=self.pubdata.getDBSPaths()
315          common.logger.message("Required data are :"+self.datasetPath)
316  
317 <        filesbyblock=self.pubdata.getFiles()
318 <        self.AllInputFiles=filesbyblock.values()
319 <        self.files = self.AllInputFiles        
275 <
276 <        ## TEMP
277 <    #    self.filesTmp = filesbyblock.values()
278 <    #    self.files = []
279 <    #    locPath='rfio:cmsbose2.bo.infn.it:/flatfiles/SE00/cms/fanfani/ProdTest/'
280 <    #    locPath=''
281 <    #    tmp = []
282 <    #    for file in self.filesTmp[0]:
283 <    #        tmp.append(locPath+file)
284 <    #    self.files.append(tmp)
285 <        ## END TEMP
317 >        self.filesbyblock=self.pubdata.getFiles()
318 >        self.eventsbyblock=self.pubdata.getEventsPerBlock()
319 >        self.eventsbyfile=self.pubdata.getEventsPerFile()
320  
321          ## get max number of events
288        #common.logger.debug(10,"number of events for primary fileblocks %i"%self.pubdata.getMaxEvents())
322          self.maxEvents=self.pubdata.getMaxEvents() ##  self.maxEvents used in Creator.py
323 <        common.logger.message("\nThe number of available events is %s"%self.maxEvents)
323 >        common.logger.message("The number of available events is %s\n"%self.maxEvents)
324  
325 +        common.logger.message("Contacting DLS...")
326          ## Contact the DLS and build a list of sites hosting the fileblocks
327          try:
328 <            dataloc=DataLocation_EDM.DataLocation_EDM(filesbyblock.keys(),cfg_params)
328 >            dataloc=DataLocation.DataLocation(self.filesbyblock.keys(),cfg_params)
329              dataloc.fetchDLSInfo()
330 <        except DataLocation_EDM.DataLocationError , ex:
330 >        except DataLocation.DataLocationError , ex:
331              msg = 'ERROR ***: failed Data Location in DLS \n %s '%ex.getErrorMessage()
332              raise CrabException(msg)
333          
300        allsites=dataloc.getSites()
301        common.logger.debug(5,"sites are %s"%allsites)
302        sites=self.checkBlackList(allsites)
303        common.logger.debug(5,"sites are (after black list) %s"%sites)
304        sites=self.checkWhiteList(sites)
305        common.logger.debug(5,"sites are (after white list) %s"%sites)
334  
335 <        if len(sites)==0:
336 <            msg = 'No sites hosting all the needed data! Exiting... '
337 <            raise CrabException(msg)
335 >        sites = dataloc.getSites()
336 >        allSites = []
337 >        listSites = sites.values()
338 >        for listSite in listSites:
339 >            for oneSite in listSite:
340 >                allSites.append(oneSite)
341 >        allSites = self.uniquelist(allSites)
342  
343 <        common.logger.message("List of Sites hosting the data : "+str(sites))
344 <        common.logger.debug(6, "List of Sites: "+str(sites))
345 <        common.analisys_common_info['sites']=sites    ## used in SchedulerEdg.py in createSchScript
314 <        self.setParam_('TargetCE', ','.join(sites))
315 <        return
343 >        common.logger.message("Sites ("+str(len(allSites))+") hosting part/all of dataset: "+str(allSites))
344 >        common.logger.debug(6, "List of Sites: "+str(allSites))
345 >        return sites
346      
347 <    def jobSplittingPerFiles(self):
347 >    def jobSplittingByBlocks(self, blockSites):
348          """
349 <        Perform job splitting based on number of files to be accessed per job
350 <        """
351 <        common.logger.debug(5,'Splitting per input files')
352 <        common.logger.message('Required '+str(self.filesPerJob)+' files per job ')
353 <        common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
354 <
355 <        ## TODO: SL need to have (from DBS) a detailed list of how many events per each file
356 <        n_tot_files = (len(self.files[0]))
357 <        ## SL: this is wrong if the files have different number of events
358 <        evPerFile = int(self.maxEvents)/n_tot_files
359 <        
360 <        common.logger.debug(5,'Events per File '+str(evPerFile))
361 <
362 <        ## if asked to process all events, do it
363 <        if self.total_number_of_events == -1:
364 <            self.total_number_of_events=self.maxEvents
365 <            self.total_number_of_jobs = int(n_tot_files)*1/int(self.filesPerJob)
366 <            common.logger.message(str(self.total_number_of_jobs)+' jobs will be created for all available events '+str(self.total_number_of_events)+' events')
367 <        
349 >        Perform job splitting. Jobs run over an integer number of files
350 >        and no more than one block.
351 >        ARGUMENT: blockSites: dictionary with blocks as keys and list of host sites as values
352 >        REQUIRES: self.selectTotalNumberEvents, self.selectEventsPerJob, self.selectNumberOfJobs,
353 >                  self.total_number_of_events, self.eventsPerJob, self.theNumberOfJobs,
354 >                  self.maxEvents, self.filesbyblock
355 >        SETS: self.jobDestination - Site destination(s) for each job (a list of lists)
356 >              self.total_number_of_jobs - Total # of jobs
357 >              self.list_of_args - File(s) job will run on (a list of lists)
358 >        """
359 >
360 >        # ---- Handle the possible job splitting configurations ---- #
361 >        if (self.selectTotalNumberEvents):
362 >            totalEventsRequested = self.total_number_of_events
363 >        if (self.selectEventsPerJob):
364 >            eventsPerJobRequested = self.eventsPerJob
365 >            if (self.selectNumberOfJobs):
366 >                totalEventsRequested = self.theNumberOfJobs * self.eventsPerJob
367 >
368 >        # If user requested all the events in the dataset
369 >        if (totalEventsRequested == -1):
370 >            eventsRemaining=self.maxEvents
371 >        # If user requested more events than are in the dataset
372 >        elif (totalEventsRequested > self.maxEvents):
373 >            eventsRemaining = self.maxEvents
374 >            common.logger.message("Requested "+str(self.total_number_of_events)+ " events, but only "+str(self.maxEvents)+" events are available.")
375 >        # If user requested less events than are in the dataset
376          else:
377 <            self.total_number_of_files = int(self.total_number_of_events/evPerFile)
340 <            ## SL: if ask for less event than what is computed to be available on a
341 <            ##     file, process the first file anyhow.
342 <            if self.total_number_of_files == 0:
343 <                self.total_number_of_files = self.total_number_of_files + 1
377 >            eventsRemaining = totalEventsRequested
378  
379 <            common.logger.debug(5,'N files  '+str(self.total_number_of_files))
379 >        # If user requested more events per job than are in the dataset
380 >        if (self.selectEventsPerJob and eventsPerJobRequested > self.maxEvents):
381 >            eventsPerJobRequested = self.maxEvents
382  
383 <            check = 0
384 <            
349 <            ## Compute the number of jobs
350 <            #self.total_number_of_jobs = int(n_tot_files)*1/int(self.filesPerJob)
351 <            self.total_number_of_jobs = int(self.total_number_of_files/self.filesPerJob)
352 <            common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
383 >        # For user info at end
384 >        totalEventCount = 0
385  
386 <            ## is there any remainder?
387 <            check = int(self.total_number_of_files) - (int(self.total_number_of_jobs)*self.filesPerJob)
386 >        if (self.selectTotalNumberEvents and self.selectNumberOfJobs):
387 >            eventsPerJobRequested = int(eventsRemaining/self.theNumberOfJobs)
388  
389 <            common.logger.debug(5,'Check  '+str(check))
389 >        if (self.selectNumberOfJobs):
390 >            common.logger.message("May not create the exact number_of_jobs requested.")
391  
392 <            if check > 0:
393 <                self.total_number_of_jobs =  self.total_number_of_jobs + 1
394 <                common.logger.message('Warning: last job will be created with '+str(check)+' files')
392 >        if ( self.ncjobs == 'all' ) :
393 >            totalNumberOfJobs = 999999999
394 >        else :
395 >            totalNumberOfJobs = self.ncjobs
396 >            
397  
398 <            common.logger.message(str(self.total_number_of_jobs)+' jobs will be created for a total of '+str((self.total_number_of_jobs-1)*self.filesPerJob*evPerFile + check*evPerFile)+' events')
399 <            pass
398 >        blocks = blockSites.keys()
399 >        blockCount = 0
400 >        # Backup variable in case self.maxEvents counted events in a non-included block
401 >        numBlocksInDataset = len(blocks)
402  
403 +        jobCount = 0
404          list_of_lists = []
405 <        for i in xrange(0, int(n_tot_files), self.filesPerJob):
406 <            parString = "\\{"
405 >
406 >        # ---- Iterate over the blocks in the dataset until ---- #
407 >        # ---- we've met the requested total # of events    ---- #
408 >        while ( (eventsRemaining > 0) and (blockCount < numBlocksInDataset) and (jobCount < totalNumberOfJobs)):
409 >            block = blocks[blockCount]
410 >            blockCount += 1
411              
412 <            params = self.files[0][i: i+self.filesPerJob]
413 <            for i in range(len(params) - 1):
414 <                parString += '\\\"' + params[i] + '\\\"\,'
412 >            if self.eventsbyblock.has_key(block) :
413 >                numEventsInBlock = self.eventsbyblock[block]
414 >                common.logger.debug(5,'Events in Block File '+str(numEventsInBlock))
415              
416 <            parString += '\\\"' + params[len(params) - 1] + '\\\"\\}'
417 <            list_of_lists.append(parString)
418 <            pass
419 <
416 >                files = self.filesbyblock[block]
417 >                numFilesInBlock = len(files)
418 >                if (numFilesInBlock <= 0):
419 >                    continue
420 >                fileCount = 0
421 >
422 >                # ---- New block => New job ---- #
423 >                parString = "\\{"
424 >                # counter for number of events in files currently worked on
425 >                filesEventCount = 0
426 >                # flag if next while loop should touch new file
427 >                newFile = 1
428 >                # job event counter
429 >                jobSkipEventCount = 0
430 >            
431 >                # ---- Iterate over the files in the block until we've met the requested ---- #
432 >                # ---- total # of events or we've gone over all the files in this block  ---- #
433 >                while ( (eventsRemaining > 0) and (fileCount < numFilesInBlock) and (jobCount < totalNumberOfJobs) ):
434 >                    file = files[fileCount]
435 >                    if newFile :
436 >                        try:
437 >                            numEventsInFile = self.eventsbyfile[file]
438 >                            common.logger.debug(6, "File "+str(file)+" has "+str(numEventsInFile)+" events")
439 >                            # increase filesEventCount
440 >                            filesEventCount += numEventsInFile
441 >                            # Add file to current job
442 >                            parString += '\\\"' + file + '\\\"\,'
443 >                            newFile = 0
444 >                        except KeyError:
445 >                            common.logger.message("File "+str(file)+" has unknown number of events: skipping")
446 >                        
447 >
448 >                    # if less events in file remain than eventsPerJobRequested
449 >                    if ( filesEventCount - jobSkipEventCount < eventsPerJobRequested ) :
450 >                        # if last file in block
451 >                        if ( fileCount == numFilesInBlock-1 ) :
452 >                            # end job using last file, use remaining events in block
453 >                            # close job and touch new file
454 >                            fullString = parString[:-2]
455 >                            fullString += '\\}'
456 >                            list_of_lists.append([fullString,str(-1),str(jobSkipEventCount)])
457 >                            common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(filesEventCount - jobSkipEventCount)+" events (last file in block).")
458 >                            self.jobDestination.append(blockSites[block])
459 >                            common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
460 >                            # reset counter
461 >                            jobCount = jobCount + 1
462 >                            totalEventCount = totalEventCount + filesEventCount - jobSkipEventCount
463 >                            eventsRemaining = eventsRemaining - filesEventCount + jobSkipEventCount
464 >                            jobSkipEventCount = 0
465 >                            # reset file
466 >                            parString = "\\{"
467 >                            filesEventCount = 0
468 >                            newFile = 1
469 >                            fileCount += 1
470 >                        else :
471 >                            # go to next file
472 >                            newFile = 1
473 >                            fileCount += 1
474 >                    # if events in file equal to eventsPerJobRequested
475 >                    elif ( filesEventCount - jobSkipEventCount == eventsPerJobRequested ) :
476 >                        # close job and touch new file
477 >                        fullString = parString[:-2]
478 >                        fullString += '\\}'
479 >                        list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
480 >                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
481 >                        self.jobDestination.append(blockSites[block])
482 >                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
483 >                        # reset counter
484 >                        jobCount = jobCount + 1
485 >                        totalEventCount = totalEventCount + eventsPerJobRequested
486 >                        eventsRemaining = eventsRemaining - eventsPerJobRequested
487 >                        jobSkipEventCount = 0
488 >                        # reset file
489 >                        parString = "\\{"
490 >                        filesEventCount = 0
491 >                        newFile = 1
492 >                        fileCount += 1
493 >                        
494 >                    # if more events in file remain than eventsPerJobRequested
495 >                    else :
496 >                        # close job but don't touch new file
497 >                        fullString = parString[:-2]
498 >                        fullString += '\\}'
499 >                        list_of_lists.append([fullString,str(eventsPerJobRequested),str(jobSkipEventCount)])
500 >                        common.logger.debug(3,"Job "+str(jobCount+1)+" can run over "+str(eventsPerJobRequested)+" events.")
501 >                        self.jobDestination.append(blockSites[block])
502 >                        common.logger.debug(5,"Job "+str(jobCount+1)+" Destination: "+str(self.jobDestination[jobCount]))
503 >                        # increase counter
504 >                        jobCount = jobCount + 1
505 >                        totalEventCount = totalEventCount + eventsPerJobRequested
506 >                        eventsRemaining = eventsRemaining - eventsPerJobRequested
507 >                        # calculate skip events for last file
508 >                        # use filesEventCount (contains several files), jobSkipEventCount and eventsPerJobRequest
509 >                        jobSkipEventCount = eventsPerJobRequested - (filesEventCount - jobSkipEventCount - self.eventsbyfile[file])
510 >                        # remove all but the last file
511 >                        filesEventCount = self.eventsbyfile[file]
512 >                        parString = "\\{"
513 >                        parString += '\\\"' + file + '\\\"\,'
514 >                    pass # END if
515 >                pass # END while (iterate over files in the block)
516 >        pass # END while (iterate over blocks in the dataset)
517 >        self.ncjobs = self.total_number_of_jobs = jobCount
518 >        if (eventsRemaining > 0 and jobCount < totalNumberOfJobs ):
519 >            common.logger.message("Could not run on all requested events because some blocks not hosted at allowed sites.")
520 >        common.logger.message("\n"+str(jobCount)+" job(s) can run on "+str(totalEventCount)+" events.\n")
521 >        
522          self.list_of_args = list_of_lists
379        print self.list_of_args
523          return
524  
525 <    def jobSplittingPerEvents(self):
525 >    def jobSplittingNoInput(self):
526          """
527          Perform job splitting based on number of event per job
528          """
529          common.logger.debug(5,'Splitting per events')
530          common.logger.message('Required '+str(self.eventsPerJob)+' events per job ')
531 +        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
532          common.logger.message('Required '+str(self.total_number_of_events)+' events in total ')
533  
534          if (self.total_number_of_events < 0):
535              msg='Cannot split jobs per Events with "-1" as total number of events'
536              raise CrabException(msg)
537  
538 <        self.total_number_of_jobs = int(self.total_number_of_events/self.eventsPerJob)
539 <        
538 >        if (self.selectEventsPerJob):
539 >            if (self.selectTotalNumberEvents):
540 >                self.total_number_of_jobs = int(self.total_number_of_events/self.eventsPerJob)
541 >            elif(self.selectNumberOfJobs) :  
542 >                self.total_number_of_jobs =self.theNumberOfJobs
543 >                self.total_number_of_events =int(self.theNumberOfJobs*self.eventsPerJob)
544 >
545 >        elif (self.selectNumberOfJobs) :
546 >            self.total_number_of_jobs = self.theNumberOfJobs
547 >            self.eventsPerJob = int(self.total_number_of_events/self.total_number_of_jobs)
548 >
549          common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
550  
551          # is there any remainder?
# Line 400 | Line 553 | class Cmssw(JobType):
553  
554          common.logger.debug(5,'Check  '+str(check))
555  
556 +        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created, each for '+str(self.eventsPerJob)+' for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
557          if check > 0:
558 <            common.logger.message('Warning: asked '+self.total_number_of_events+' but will do only '+(int(self.total_number_of_jobs)*self.eventsPerJob))
405 <
406 <        common.logger.message(str(self.total_number_of_jobs)+' jobs will be created for a total of '+str(self.total_number_of_jobs*self.eventsPerJob)+' events')
558 >            common.logger.message('Warning: asked '+str(self.total_number_of_events)+' but can do only '+str(int(self.total_number_of_jobs)*self.eventsPerJob))
559  
560          # argument is seed number.$i
561          self.list_of_args = []
562          for i in range(self.total_number_of_jobs):
563 <            self.list_of_args.append(int(str(self.sourceSeed)+str(i)))
564 <        print self.list_of_args
563 >            ## Since there is no input, any site is good
564 >           # self.jobDestination.append(["Any"])
565 >            self.jobDestination.append([""]) #must be empty to write correctly the xml
566 >            args=''
567 >            if (self.firstRun):
568 >                    ## pythia first run
569 >                #self.list_of_args.append([(str(self.firstRun)+str(i))])
570 >                args=args+(str(self.firstRun)+str(i))
571 >            else:
572 >                ## no first run
573 >                #self.list_of_args.append([str(i)])
574 >                args=args+str(i)
575 >            if (self.sourceSeed):
576 >                if (self.sourceSeedVtx):
577 >                    ## pythia + vtx random seed
578 >                    #self.list_of_args.append([
579 >                    #                          str(self.sourceSeed)+str(i),
580 >                    #                          str(self.sourceSeedVtx)+str(i)
581 >                    #                          ])
582 >                    args=args+str(',')+str(self.sourceSeed)+str(i)+str(',')+str(self.sourceSeedVtx)+str(i)
583 >                else:
584 >                    ## only pythia random seed
585 >                    #self.list_of_args.append([(str(self.sourceSeed)+str(i))])
586 >                    args=args +str(',')+str(self.sourceSeed)+str(i)
587 >            else:
588 >                ## no random seed
589 >                if str(args)=='': args=args+(str(self.firstRun)+str(i))
590 >            arguments=args.split(',')
591 >            if len(arguments)==3:self.list_of_args.append([str(arguments[0]),str(arguments[1]),str(arguments[2])])
592 >            elif len(arguments)==2:self.list_of_args.append([str(arguments[0]),str(arguments[1])])
593 >            else :self.list_of_args.append([str(arguments[0])])
594 >            
595 >     #   print self.list_of_args
596  
597          return
598  
599 +
600 +    def jobSplittingForScript(self):#CarlosDaniele
601 +        """
602 +        Perform job splitting based on number of job
603 +        """
604 +        common.logger.debug(5,'Splitting per job')
605 +        common.logger.message('Required '+str(self.theNumberOfJobs)+' jobs in total ')
606 +
607 +        self.total_number_of_jobs = self.theNumberOfJobs
608 +
609 +        common.logger.debug(5,'N jobs  '+str(self.total_number_of_jobs))
610 +
611 +        common.logger.message(str(self.total_number_of_jobs)+' jobs can be created')
612 +
613 +        # argument is seed number.$i
614 +        self.list_of_args = []
615 +        for i in range(self.total_number_of_jobs):
616 +            ## Since there is no input, any site is good
617 +           # self.jobDestination.append(["Any"])
618 +            self.jobDestination.append([""])
619 +            ## no random seed
620 +            self.list_of_args.append([str(i)])
621 +        return
622 +
623      def split(self, jobParams):
624  
625          common.jobDB.load()
# Line 424 | Line 631 | class Cmssw(JobType):
631              jobParams.append("")
632          
633          for job in range(njobs):
634 <            jobParams[job] = str(arglist[job])
634 >            jobParams[job] = arglist[job]
635 >            # print str(arglist[job])
636 >            # print jobParams[job]
637              common.jobDB.setArguments(job, jobParams[job])
638 +            common.logger.debug(5,"Job "+str(job)+" Destination: "+str(self.jobDestination[job]))
639 +            common.jobDB.setDestination(job, self.jobDestination[job])
640  
641          common.jobDB.save()
642          return
643      
644      def getJobTypeArguments(self, nj, sched):
645 <        return common.jobDB.arguments(nj)
645 >        result = ''
646 >        for i in common.jobDB.arguments(nj):
647 >            result=result+str(i)+" "
648 >        return result
649    
650      def numberOfJobs(self):
651          # Fabio
652          return self.total_number_of_jobs
653  
440    def checkBlackList(self, allSites):
441        if len(self.reCEBlackList)==0: return allSites
442        sites = []
443        for site in allSites:
444            common.logger.debug(10,'Site '+site)
445            good=1
446            for re in self.reCEBlackList:
447                if re.search(site):
448                    common.logger.message('CE in black list, skipping site '+site)
449                    good=0
450                pass
451            if good: sites.append(site)
452        if len(sites) == 0:
453            common.logger.debug(3,"No sites found after BlackList")
454        return sites
455
456    def checkWhiteList(self, allSites):
457
458        if len(self.reCEWhiteList)==0: return allSites
459        sites = []
460        for site in allSites:
461            good=0
462            for re in self.reCEWhiteList:
463                if re.search(site):
464                    common.logger.debug(5,'CE in white list, adding site '+site)
465                    good=1
466                if not good: continue
467                sites.append(site)
468        if len(sites) == 0:
469            common.logger.message("No sites found after WhiteList\n")
470        else:
471            common.logger.debug(5,"Selected sites via WhiteList are "+str(sites)+"\n")
472        return sites
473
654      def getTarBall(self, exe):
655          """
656          Return the TarBall with lib and exe
657          """
658          
659          # if it exist, just return it
660 <        self.tgzNameWithPath = common.work_space.shareDir()+self.tgz_name
660 >        #
661 >        # Marco. Let's start to use relative path for Boss XML files
662 >        #
663 >        self.tgzNameWithPath = common.work_space.pathForTgz()+'share/'+self.tgz_name
664          if os.path.exists(self.tgzNameWithPath):
665              return self.tgzNameWithPath
666  
# Line 491 | Line 674 | class Cmssw(JobType):
674          # First of all declare the user Scram area
675          swArea = self.scram.getSWArea_()
676          #print "swArea = ", swArea
677 <        swVersion = self.scram.getSWVersion()
678 <        #print "swVersion = ", swVersion
677 >        # swVersion = self.scram.getSWVersion()
678 >        # print "swVersion = ", swVersion
679          swReleaseTop = self.scram.getReleaseTop_()
680          #print "swReleaseTop = ", swReleaseTop
681          
# Line 500 | Line 683 | class Cmssw(JobType):
683          if swReleaseTop == '' or swArea == swReleaseTop:
684              return
685  
686 <        filesToBeTarred = []
687 <        ## First find the executable
688 <        if (self.executable != ''):
689 <            exeWithPath = self.scram.findFile_(executable)
690 < #           print exeWithPath
691 <            if ( not exeWithPath ):
692 <                raise CrabException('User executable '+executable+' not found')
693 <
694 <            ## then check if it's private or not
695 <            if exeWithPath.find(swReleaseTop) == -1:
696 <                # the exe is private, so we must ship
697 <                common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
698 <                path = swArea+'/'
699 <                exe = string.replace(exeWithPath, path,'')
700 <                filesToBeTarred.append(exe)
701 <                pass
702 <            else:
703 <                # the exe is from release, we'll find it on WN
704 <                pass
705 <
706 <        ## Now get the libraries: only those in local working area
707 <        libDir = 'lib'
708 <        lib = swArea+'/' +libDir
709 <        common.logger.debug(5,"lib "+lib+" to be tarred")
710 <        if os.path.exists(lib):
711 <            filesToBeTarred.append(libDir)
712 <
713 <        ## Now check if module dir is present
714 <        moduleDir = 'module'
715 <        if os.path.isdir(swArea+'/'+moduleDir):
716 <            filesToBeTarred.append(moduleDir)
717 <
718 <        ## Now check if the Data dir is present
719 <        dataDir = 'src/Data/'
720 <        if os.path.isdir(swArea+'/'+dataDir):
721 <            filesToBeTarred.append(dataDir)
722 <
723 <        ## Create the tar-ball
724 <        if len(filesToBeTarred)>0:
725 <            cwd = os.getcwd()
726 <            os.chdir(swArea)
727 <            tarcmd = 'tar zcvf ' + self.tgzNameWithPath + ' '
728 <            for line in filesToBeTarred:
729 <                tarcmd = tarcmd + line + ' '
730 <            cout = runCommand(tarcmd)
731 <            if not cout:
732 <                raise CrabException('Could not create tar-ball')
733 <            os.chdir(cwd)
734 <        else:
735 <            common.logger.debug(5,"No files to be to be tarred")
686 >        import tarfile
687 >        try: # create tar ball
688 >            tar = tarfile.open(self.tgzNameWithPath, "w:gz")
689 >            ## First find the executable
690 >            if (executable != ''):
691 >                exeWithPath = self.scram.findFile_(executable)
692 >                if ( not exeWithPath ):
693 >                    raise CrabException('User executable '+executable+' not found')
694 >    
695 >                ## then check if it's private or not
696 >                if exeWithPath.find(swReleaseTop) == -1:
697 >                    # the exe is private, so we must ship
698 >                    common.logger.debug(5,"Exe "+exeWithPath+" to be tarred")
699 >                    path = swArea+'/'
700 >                    exe = string.replace(exeWithPath, path,'')
701 >                    tar.add(path+exe,executable)
702 >                    pass
703 >                else:
704 >                    # the exe is from release, we'll find it on WN
705 >                    pass
706 >    
707 >            ## Now get the libraries: only those in local working area
708 >            libDir = 'lib'
709 >            lib = swArea+'/' +libDir
710 >            common.logger.debug(5,"lib "+lib+" to be tarred")
711 >            if os.path.exists(lib):
712 >                tar.add(lib,libDir)
713 >    
714 >            ## Now check if module dir is present
715 >            moduleDir = 'module'
716 >            module = swArea + '/' + moduleDir
717 >            if os.path.isdir(module):
718 >                tar.add(module,moduleDir)
719 >
720 >            ## Now check if any data dir(s) is present
721 >            swAreaLen=len(swArea)
722 >            for root, dirs, files in os.walk(swArea):
723 >                if "data" in dirs:
724 >                    common.logger.debug(5,"data "+root+"/data"+" to be tarred")
725 >                    tar.add(root+"/data",root[swAreaLen:]+"/data")
726 >
727 >            ## Add ProdAgent dir to tar
728 >            paDir = 'ProdAgentApi'
729 >            pa = os.environ['CRABDIR'] + '/' + 'ProdAgentApi'
730 >            if os.path.isdir(pa):
731 >                tar.add(pa,paDir)
732 >        
733 >            common.logger.debug(5,"Files added to "+self.tgzNameWithPath+" : "+str(tar.getnames()))
734 >            tar.close()
735 >        except :
736 >            raise CrabException('Could not create tar-ball')
737 >
738 >        ## check for tarball size
739 >        tarballinfo = os.stat(self.tgzNameWithPath)
740 >        if ( tarballinfo.st_size > self.MaxTarBallSize*1024*1024 ) :
741 >            raise CrabException('Input sandbox size of ' + str(float(tarballinfo.st_size)/1024.0/1024.0) + ' MB is larger than the allowed ' + str(self.MaxTarBallSize) + ' MB input sandbox limit and not supported by the used GRID submission system. Please make sure that no unnecessary files are in all data directories in your local CMSSW project area as they are automatically packed into the input sandbox.')
742 >
743 >        ## create tar-ball with ML stuff
744 >        self.MLtgzfile =  common.work_space.pathForTgz()+'share/MLfiles.tgz'
745 >        try:
746 >            tar = tarfile.open(self.MLtgzfile, "w:gz")
747 >            path=os.environ['CRABDIR'] + '/python/'
748 >            for file in ['report.py', 'DashboardAPI.py', 'Logger.py', 'ProcInfo.py', 'apmon.py', 'parseCrabFjr.py']:
749 >                tar.add(path+file,file)
750 >            common.logger.debug(5,"Files added to "+self.MLtgzfile+" : "+str(tar.getnames()))
751 >            tar.close()
752 >        except :
753 >            raise CrabException('Could not create ML files tar-ball')
754          
755          return
756          
# Line 566 | Line 767 | class Cmssw(JobType):
767          txt += 'if [ $middleware == LCG ]; then \n'
768          txt += self.wsSetupCMSLCGEnvironment_()
769          txt += 'elif [ $middleware == OSG ]; then\n'
770 <        txt += '    time=`date -u +"%s"`\n'
771 <        txt += '    WORKING_DIR=$OSG_WN_TMP/cms_$time\n'
571 <        txt += '    echo "Creating working directory: $WORKING_DIR"\n'
572 <        txt += '    /bin/mkdir -p $WORKING_DIR\n'
770 >        txt += '    WORKING_DIR=`/bin/mktemp  -d $OSG_WN_TMP/cms_XXXXXXXXXXXX`\n'
771 >        txt += '    echo "Created working directory: $WORKING_DIR"\n'
772          txt += '    if [ ! -d $WORKING_DIR ] ;then\n'
773          txt += '        echo "SET_CMS_ENV 10016 ==> OSG $WORKING_DIR could not be created on WN `hostname`"\n'
774          txt += '        echo "JOB_EXIT_STATUS = 10016"\n'
775          txt += '        echo "JobExitCode=10016" | tee -a $RUNTIME_AREA/$repo\n'
776          txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
777 +        txt += '        rm -f $RUNTIME_AREA/$repo \n'
778 +        txt += '        echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
779 +        txt += '        echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
780          txt += '        exit 1\n'
781          txt += '    fi\n'
782          txt += '\n'
# Line 594 | Line 796 | class Cmssw(JobType):
796          txt += '   echo "JOB_EXIT_STATUS = 10034"\n'
797          txt += '   echo "JobExitCode=10034" | tee -a $RUNTIME_AREA/$repo\n'
798          txt += '   dumpStatus $RUNTIME_AREA/$repo\n'
799 +        txt += '   rm -f $RUNTIME_AREA/$repo \n'
800 +        txt += '   echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
801 +        txt += '   echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
802          ## OLI_Daniele
803          txt += '    if [ $middleware == OSG ]; then \n'
804          txt += '        echo "Remove working directory: $WORKING_DIR"\n'
# Line 604 | Line 809 | class Cmssw(JobType):
809          txt += '            echo "JOB_EXIT_STATUS = 10018"\n'
810          txt += '            echo "JobExitCode=10018" | tee -a $RUNTIME_AREA/$repo\n'
811          txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
812 +        txt += '            rm -f $RUNTIME_AREA/$repo \n'
813 +        txt += '            echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
814 +        txt += '            echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
815          txt += '        fi\n'
816          txt += '    fi \n'
817          txt += '   exit 1 \n'
818          txt += 'fi \n'
819          txt += 'echo "CMSSW_VERSION =  '+self.version+'"\n'
820 +        txt += 'export SCRAM_ARCH='+self.executable_arch+'\n'
821          txt += 'cd '+self.version+'\n'
822          ### needed grep for bug in scramv1 ###
823 +        txt += scram+' runtime -sh\n'
824          txt += 'eval `'+scram+' runtime -sh | grep -v SCRAMRT_LSB_JOBNAME`\n'
825 +        txt += 'echo $PATH\n'
826  
827          # Handle the arguments:
828          txt += "\n"
829          txt += "## number of arguments (first argument always jobnumber)\n"
830          txt += "\n"
831 <        txt += "narg=$#\n"
832 <        txt += "if [ $narg -lt 2 ]\n"
831 > #        txt += "narg=$#\n"
832 >        txt += "if [ $nargs -lt 2 ]\n"
833          txt += "then\n"
834 <        txt += "    echo 'SET_EXE_ENV 1 ==> ERROR Too few arguments' +$narg+ \n"
834 >        txt += "    echo 'SET_EXE_ENV 1 ==> ERROR Too few arguments' +$nargs+ \n"
835          txt += '    echo "JOB_EXIT_STATUS = 50113"\n'
836          txt += '    echo "JobExitCode=50113" | tee -a $RUNTIME_AREA/$repo\n'
837          txt += '    dumpStatus $RUNTIME_AREA/$repo\n'
838 +        txt += '    rm -f $RUNTIME_AREA/$repo \n'
839 +        txt += '    echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
840 +        txt += '    echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
841          ## OLI_Daniele
842          txt += '    if [ $middleware == OSG ]; then \n'
843          txt += '        echo "Remove working directory: $WORKING_DIR"\n'
# Line 634 | Line 848 | class Cmssw(JobType):
848          txt += '            echo "JOB_EXIT_STATUS = 50114"\n'
849          txt += '            echo "JobExitCode=50114" | tee -a $RUNTIME_AREA/$repo\n'
850          txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
851 +        txt += '            rm -f $RUNTIME_AREA/$repo \n'
852 +        txt += '            echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
853 +        txt += '            echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
854          txt += '        fi\n'
855          txt += '    fi \n'
856          txt += "    exit 1\n"
# Line 642 | Line 859 | class Cmssw(JobType):
859  
860          # Prepare job-specific part
861          job = common.job_list[nj]
862 <        pset = os.path.basename(job.configFilename())
863 <        txt += '\n'
864 <        if (self.datasetPath): # standard job
865 <            txt += 'InputFiles=$2\n'
866 <            txt += 'echo "Inputfiles:<$InputFiles>"\n'
867 <            txt += 'sed "s#{\'INPUT\'}#$InputFiles#" $RUNTIME_AREA/'+pset+' > pset.cfg\n'
868 <        else:  # pythia like job
869 <            txt += 'Seed=$2\n'
870 <            txt += 'echo "Seed: <$Seed>"\n'
871 <            txt += 'sed "s#INPUT#$Seed#" $RUNTIME_AREA/'+pset+' > pset.cfg\n'
862 >        if self.pset != None: #CarlosDaniele
863 >            pset = os.path.basename(job.configFilename())
864 >            txt += '\n'
865 >            if (self.datasetPath): # standard job
866 >                #txt += 'InputFiles=$2\n'
867 >                txt += 'InputFiles=${args[1]}\n'
868 >                txt += 'MaxEvents=${args[2]}\n'
869 >                txt += 'SkipEvents=${args[3]}\n'
870 >                txt += 'echo "Inputfiles:<$InputFiles>"\n'
871 >                txt += 'sed "s#{\'INPUT\'}#$InputFiles#" $RUNTIME_AREA/'+pset+' > pset_tmp_1.cfg\n'
872 >                txt += 'echo "MaxEvents:<$MaxEvents>"\n'
873 >                txt += 'sed "s#INPUTMAXEVENTS#$MaxEvents#" pset_tmp_1.cfg > pset_tmp_2.cfg\n'
874 >                txt += 'echo "SkipEvents:<$SkipEvents>"\n'
875 >                txt += 'sed "s#INPUTSKIPEVENTS#$SkipEvents#" pset_tmp_2.cfg > pset.cfg\n'
876 >            else:  # pythia like job
877 >                if (self.sourceSeed):
878 >                    txt += 'FirstRun=${args[1]}\n'
879 >                    txt += 'echo "FirstRun: <$FirstRun>"\n'
880 >                    txt += 'sed "s#\<INPUTFIRSTRUN\>#$FirstRun#" $RUNTIME_AREA/'+pset+' > tmp_1.cfg\n'
881 >                else:
882 >                    txt += '# Copy untouched pset\n'
883 >                    txt += 'cp $RUNTIME_AREA/'+pset+' tmp_1.cfg\n'
884 >                if (self.sourceSeed):
885 > #                    txt += 'Seed=$2\n'
886 >                    txt += 'Seed=${args[2]}\n'
887 >                    txt += 'echo "Seed: <$Seed>"\n'
888 >                    txt += 'sed "s#\<INPUT\>#$Seed#" tmp_1.cfg > tmp_2.cfg\n'
889 >                    if (self.sourceSeedVtx):
890 > #                        txt += 'VtxSeed=$3\n'
891 >                        txt += 'VtxSeed=${args[3]}\n'
892 >                        txt += 'echo "VtxSeed: <$VtxSeed>"\n'
893 >                        txt += 'sed "s#INPUTVTX#$VtxSeed#" tmp_2.cfg > pset.cfg\n'
894 >                    else:
895 >                        txt += 'mv tmp_2.cfg pset.cfg\n'
896 >                else:
897 >                    txt += 'mv tmp_1.cfg pset.cfg\n'
898 >                   # txt += '# Copy untouched pset\n'
899 >                   # txt += 'cp $RUNTIME_AREA/'+pset+' pset.cfg\n'
900 >
901  
902          if len(self.additional_inbox_files) > 0:
903              for file in self.additional_inbox_files:
904 <                txt += 'if [ -e $RUNTIME_AREA/'+file+' ] ; then\n'
905 <                txt += '   cp $RUNTIME_AREA/'+file+' .\n'
906 <                txt += '   chmod +x '+file+'\n'
904 >                relFile = file.split("/")[-1]
905 >                txt += 'if [ -e $RUNTIME_AREA/'+relFile+' ] ; then\n'
906 >                txt += '   cp $RUNTIME_AREA/'+relFile+' .\n'
907 >                txt += '   chmod +x '+relFile+'\n'
908                  txt += 'fi\n'
909              pass
910  
911 <        txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
912 <
913 <        txt += '\n'
914 <        txt += 'echo "***** cat pset.cfg *********"\n'
915 <        txt += 'cat pset.cfg\n'
916 <        txt += 'echo "****** end pset.cfg ********"\n'
917 <        txt += '\n'
918 <        # txt += 'echo "***** cat pset1.cfg *********"\n'
919 <        # txt += 'cat pset1.cfg\n'
920 <        # txt += 'echo "****** end pset1.cfg ********"\n'
911 >        if self.pset != None: #CarlosDaniele
912 >            txt += 'echo "### END JOB SETUP ENVIRONMENT ###"\n\n'
913 >        
914 >            txt += '\n'
915 >            txt += 'echo "***** cat pset.cfg *********"\n'
916 >            txt += 'cat pset.cfg\n'
917 >            txt += 'echo "****** end pset.cfg ********"\n'
918 >            txt += '\n'
919 >            # txt += 'echo "***** cat pset1.cfg *********"\n'
920 >            # txt += 'cat pset1.cfg\n'
921 >            # txt += 'echo "****** end pset1.cfg ********"\n'
922          return txt
923  
924 <    def wsBuildExe(self, nj):
924 >    def wsBuildExe(self, nj=0):
925          """
926          Put in the script the commands to build an executable
927          or a library.
# Line 694 | Line 942 | class Cmssw(JobType):
942              txt += '       cd $RUNTIME_AREA\n'
943              txt += '       /bin/rm -rf $WORKING_DIR\n'
944              txt += '       if [ -d $WORKING_DIR ] ;then\n'
945 <            txt += '        echo "SET_EXE 50999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Untarring .tgz file failed"\n'
946 <            txt += '        echo "JOB_EXIT_STATUS = 50999"\n'
947 <            txt += '        echo "JobExitCode=50999" | tee -a $RUNTIME_AREA/$repo\n'
948 <            txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
945 >            txt += '           echo "SET_EXE 50999 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after Untarring .tgz file failed"\n'
946 >            txt += '           echo "JOB_EXIT_STATUS = 50999"\n'
947 >            txt += '           echo "JobExitCode=50999" | tee -a $RUNTIME_AREA/$repo\n'
948 >            txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
949 >            txt += '           rm -f $RUNTIME_AREA/$repo \n'
950 >            txt += '           echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
951 >            txt += '           echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
952              txt += '       fi\n'
953              txt += '   fi \n'
954              txt += '   \n'
# Line 705 | Line 956 | class Cmssw(JobType):
956              txt += 'else \n'
957              txt += '   echo "Successful untar" \n'
958              txt += 'fi \n'
959 +            txt += '\n'
960 +            txt += 'echo "Include ProdAgentApi in PYTHONPATH"\n'
961 +            txt += 'if [ -z "$PYTHONPATH" ]; then\n'
962 +            txt += '   export PYTHONPATH=ProdAgentApi\n'
963 +            txt += 'else\n'
964 +            txt += '   export PYTHONPATH=ProdAgentApi:${PYTHONPATH}\n'
965 +            txt += 'fi\n'
966 +            txt += '\n'
967 +
968              pass
969          
970          return txt
# Line 716 | Line 976 | class Cmssw(JobType):
976          """
977          
978      def executableName(self):
979 <        return self.executable
979 >        if self.scriptExe: #CarlosDaniele
980 >            return "sh "
981 >        else:
982 >            return self.executable
983  
984      def executableArgs(self):
985 <        return " -p pset.cfg"
985 >        if self.scriptExe:#CarlosDaniele
986 >            return   self.scriptExe + " $NJob"
987 >        else:
988 >            return " -p pset.cfg"
989  
990      def inputSandbox(self, nj):
991          """
992          Returns a list of filenames to be put in JDL input sandbox.
993          """
994          inp_box = []
995 <        # dict added to delete duplicate from input sandbox file list
996 <        seen = {}
995 >        # # dict added to delete duplicate from input sandbox file list
996 >        # seen = {}
997          ## code
998          if os.path.isfile(self.tgzNameWithPath):
999              inp_box.append(self.tgzNameWithPath)
1000 +        if os.path.isfile(self.MLtgzfile):
1001 +            inp_box.append(self.MLtgzfile)
1002          ## config
1003 <        inp_box.append(common.job_list[nj].configFilename())
1003 >        if not self.pset is None:
1004 >            inp_box.append(common.work_space.pathForTgz() + 'job/' + self.configFilename())
1005          ## additional input files
1006 <        #for file in self.additional_inbox_files:
1007 <        #    inp_box.append(common.work_space.cwdDir()+file)
1006 >        for file in self.additional_inbox_files:
1007 >            inp_box.append(file)
1008          return inp_box
1009  
1010      def outputSandbox(self, nj):
# Line 744 | Line 1013 | class Cmssw(JobType):
1013          """
1014          out_box = []
1015  
747        stdout=common.job_list[nj].stdout()
748        stderr=common.job_list[nj].stderr()
749
1016          ## User Declared output files
1017 <        for out in self.output_file:
1017 >        for out in (self.output_file+self.output_file_sandbox):
1018              n_out = nj + 1
1019              out_box.append(self.numberFile_(out,str(n_out)))
1020          return out_box
755        return []
1021  
1022      def prepareSteeringCards(self):
1023          """
# Line 768 | Line 1033 | class Cmssw(JobType):
1033          txt = '\n'
1034          txt += '# directory content\n'
1035          txt += 'ls \n'
1036 <        file_list = ''
1037 <        for fileWithSuffix in self.output_file:
1036 >
1037 >        for fileWithSuffix in (self.output_file+self.output_file_sandbox):
1038              output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
774            file_list=file_list+output_file_num+' '
1039              txt += '\n'
1040              txt += '# check output file\n'
1041              txt += 'ls '+fileWithSuffix+'\n'
1042 <            txt += 'exe_result=$?\n'
1043 <            txt += 'if [ $exe_result -ne 0 ] ; then\n'
1044 <            txt += '   echo "ERROR: No output file to manage"\n'
781 <            txt += '   echo "JOB_EXIT_STATUS = $exe_result"\n'
782 <            txt += '   echo "JobExitCode=60302" | tee -a $RUNTIME_AREA/$repo\n'
783 <            txt += '   dumpStatus $RUNTIME_AREA/$repo\n'
784 <            ### OLI_DANIELE
1042 >            txt += 'ls_result=$?\n'
1043 >            txt += 'if [ $ls_result -ne 0 ] ; then\n'
1044 >            txt += '   echo "ERROR: Problem with output file"\n'
1045              if common.scheduler.boss_scheduler_name == 'condor_g':
1046                  txt += '    if [ $middleware == OSG ]; then \n'
1047                  txt += '        echo "prepare dummy output file"\n'
# Line 792 | Line 1052 | class Cmssw(JobType):
1052              txt += 'fi\n'
1053        
1054          txt += 'cd $RUNTIME_AREA\n'
1055 <        file_list=file_list[:-1]
796 <        txt += 'file_list="'+file_list+'"\n'
1055 >        txt += 'cd $RUNTIME_AREA\n'
1056          ### OLI_DANIELE
1057          txt += 'if [ $middleware == OSG ]; then\n'  
1058          txt += '    cd $RUNTIME_AREA\n'
# Line 804 | Line 1063 | class Cmssw(JobType):
1063          txt += '        echo "JOB_EXIT_STATUS = 60999"\n'
1064          txt += '        echo "JobExitCode=60999" | tee -a $RUNTIME_AREA/$repo\n'
1065          txt += '        dumpStatus $RUNTIME_AREA/$repo\n'
1066 +        txt += '        rm -f $RUNTIME_AREA/$repo \n'
1067 +        txt += '        echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1068 +        txt += '        echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1069          txt += '    fi\n'
1070          txt += 'fi\n'
1071          txt += '\n'
1072 +
1073 +        file_list = ''
1074 +        ## Add to filelist only files to be possibly copied to SE
1075 +        for fileWithSuffix in self.output_file:
1076 +            output_file_num = self.numberFile_(fileWithSuffix, '$NJob')
1077 +            file_list=file_list+output_file_num+' '
1078 +        file_list=file_list[:-1]
1079 +        txt += 'file_list="'+file_list+'"\n'
1080 +
1081          return txt
1082  
1083      def numberFile_(self, file, txt):
# Line 821 | Line 1092 | class Cmssw(JobType):
1092          # add "_txt"
1093          if len(p)>1:
1094            ext = p[len(p)-1]
824          #result = name + '_' + str(txt) + "." + ext
1095            result = name + '_' + txt + "." + ext
1096          else:
827          #result = name + '_' + str(txt)
1097            result = name + '_' + txt
1098          
1099          return result
1100  
1101 <    def getRequirements(self):
1101 >    def getRequirements(self, nj=[]):
1102          """
1103          return job requirements to add to jdl files
1104          """
1105          req = ''
1106 <        if common.analisys_common_info['sw_version']:
1106 >        if self.version:
1107              req='Member("VO-cms-' + \
1108 <                 common.analisys_common_info['sw_version'] + \
1108 >                 self.version + \
1109                   '", other.GlueHostApplicationSoftwareRunTimeEnvironment)'
1110 <        if common.analisys_common_info['sites']:
1111 <            if len(common.analisys_common_info['sites'])>0:
1112 <                req = req + ' && ('
844 <                for i in range(len(common.analisys_common_info['sites'])):
845 <                    req = req + 'other.GlueCEInfoHostName == "' \
846 <                         + common.analisys_common_info['sites'][i] + '"'
847 <                    if ( i < (int(len(common.analisys_common_info['sites']) - 1)) ):
848 <                        req = req + ' || '
849 <            req = req + ')'
850 <        #print "req = ", req
1110 >
1111 >        req = req + ' && (other.GlueHostNetworkAdapterOutboundIP)'
1112 >
1113          return req
1114  
1115      def configFilename(self):
# Line 865 | Line 1127 | class Cmssw(JobType):
1127          txt += '   if [ -f $GRID3_APP_DIR/cmssoft/cmsset_default.sh ] ;then\n'
1128          txt += '      # Use $GRID3_APP_DIR/cmssoft/cmsset_default.sh to setup cms software\n'
1129          txt += '       source $GRID3_APP_DIR/cmssoft/cmsset_default.sh '+self.version+'\n'
1130 <        txt += '   elif [ -f $OSG_APP/cmssoft/cmsset_default.sh ] ;then\n'
1131 <        txt += '      # Use $OSG_APP/cmssoft/cmsset_default.sh to setup cms software\n'
1132 <        txt += '       source $OSG_APP/cmssoft/cmsset_default.sh '+self.version+'\n'
1130 >        txt += '   elif [ -f $OSG_APP/cmssoft/cms/cmsset_default.sh ] ;then\n'
1131 >        txt += '      # Use $OSG_APP/cmssoft/cms/cmsset_default.sh to setup cms software\n'
1132 >        txt += '       source $OSG_APP/cmssoft/cms/cmsset_default.sh '+self.version+'\n'
1133          txt += '   else\n'
1134 <        txt += '       echo "SET_CMS_ENV 10020 ==> ERROR $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cmsset_default.sh file not found"\n'
1134 >        txt += '       echo "SET_CMS_ENV 10020 ==> ERROR $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1135          txt += '       echo "JOB_EXIT_STATUS = 10020"\n'
1136          txt += '       echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1137          txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
1138 +        txt += '       rm -f $RUNTIME_AREA/$repo \n'
1139 +        txt += '       echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1140 +        txt += '       echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1141          txt += '       exit 1\n'
1142          txt += '\n'
1143          txt += '       echo "Remove working directory: $WORKING_DIR"\n'
1144          txt += '       cd $RUNTIME_AREA\n'
1145          txt += '       /bin/rm -rf $WORKING_DIR\n'
1146          txt += '       if [ -d $WORKING_DIR ] ;then\n'
1147 <        txt += '            echo "SET_CMS_ENV 10017 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cmsset_default.sh file not found"\n'
1147 >        txt += '            echo "SET_CMS_ENV 10017 ==> OSG $WORKING_DIR could not be deleted on WN `hostname` after $GRID3_APP_DIR/cmssoft/cmsset_default.sh and $OSG_APP/cmssoft/cms/cmsset_default.sh file not found"\n'
1148          txt += '            echo "JOB_EXIT_STATUS = 10017"\n'
1149          txt += '            echo "JobExitCode=10017" | tee -a $RUNTIME_AREA/$repo\n'
1150          txt += '            dumpStatus $RUNTIME_AREA/$repo\n'
1151 +        txt += '            rm -f $RUNTIME_AREA/$repo \n'
1152 +        txt += '            echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1153 +        txt += '            echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1154          txt += '       fi\n'
1155          txt += '\n'
1156          txt += '       exit 1\n'
# Line 906 | Line 1174 | class Cmssw(JobType):
1174          txt += '       echo "JOB_EXIT_STATUS = 10031" \n'
1175          txt += '       echo "JobExitCode=10031" | tee -a $RUNTIME_AREA/$repo\n'
1176          txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
1177 +        txt += '       rm -f $RUNTIME_AREA/$repo \n'
1178 +        txt += '       echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1179 +        txt += '       echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1180          txt += '       exit 1\n'
1181          txt += '   else\n'
1182          txt += '       echo "Sourcing environment... "\n'
# Line 914 | Line 1185 | class Cmssw(JobType):
1185          txt += '           echo "JOB_EXIT_STATUS = 10020"\n'
1186          txt += '           echo "JobExitCode=10020" | tee -a $RUNTIME_AREA/$repo\n'
1187          txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
1188 +        txt += '           rm -f $RUNTIME_AREA/$repo \n'
1189 +        txt += '           echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1190 +        txt += '           echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1191          txt += '           exit 1\n'
1192          txt += '       fi\n'
1193          txt += '       echo "sourcing $VO_CMS_SW_DIR/cmsset_default.sh"\n'
# Line 924 | Line 1198 | class Cmssw(JobType):
1198          txt += '           echo "JOB_EXIT_STATUS = 10032"\n'
1199          txt += '           echo "JobExitCode=10032" | tee -a $RUNTIME_AREA/$repo\n'
1200          txt += '           dumpStatus $RUNTIME_AREA/$repo\n'
1201 +        txt += '           rm -f $RUNTIME_AREA/$repo \n'
1202 +        txt += '           echo "MonitorJobID=`echo $MonitorJobID`" | tee -a $RUNTIME_AREA/$repo \n'
1203 +        txt += '           echo "MonitorID=`echo $MonitorID`" | tee -a $RUNTIME_AREA/$repo\n'
1204          txt += '           exit 1\n'
1205          txt += '       fi\n'
1206          txt += '   fi\n'
1207          txt += '   \n'
931        txt += '   string=`cat /etc/redhat-release`\n'
932        txt += '   echo $string\n'
933        txt += '   if [[ $string = *alhalla* ]]; then\n'
934        txt += '       echo "SCRAM_ARCH= $SCRAM_ARCH"\n'
935        txt += '   elif [[ $string = *Enterprise* ]] || [[ $string = *cientific* ]]; then\n'
936        txt += '       export SCRAM_ARCH=slc3_ia32_gcc323\n'
937        txt += '       echo "SCRAM_ARCH= $SCRAM_ARCH"\n'
938        txt += '   else\n'
939        txt += '       echo "SET_CMS_ENV 10033 ==> ERROR OS unknown, LCG environment not initialized"\n'
940        txt += '       echo "JOB_EXIT_STATUS = 10033"\n'
941        txt += '       echo "JobExitCode=10033" | tee -a $RUNTIME_AREA/$repo\n'
942        txt += '       dumpStatus $RUNTIME_AREA/$repo\n'
943        txt += '       exit 1\n'
944        txt += '   fi\n'
1208          txt += '   echo "SET_CMS_ENV 0 ==> setup cms environment ok"\n'
1209          txt += '   echo "### END SETUP CMS LCG ENVIRONMENT ###"\n'
1210          return txt
# Line 957 | Line 1220 | class Cmssw(JobType):
1220          
1221      def getTaskid(self):
1222          return self._taskId
1223 +
1224 + #######################################################################
1225 +    def uniquelist(self, old):
1226 +        """
1227 +        remove duplicates from a list
1228 +        """
1229 +        nd={}
1230 +        for e in old:
1231 +            nd[e]=0
1232 +        return nd.keys()

Diff Legend

Removed lines
+ Added lines
< Changed lines
> Changed lines